The Structured Audio Information Retrieval System (STAIRS) project targets environments where workers need access to information but cannot use traditional hands-and-eyes devices such as a PDA. The information to be accessed is stored in an information base, either as pre-recorded audio or as text to be run through a text-to-speech engine. Given the inherent limitations of the simple audio interface used in STAIRS, it is important to structure the information base in a way that makes navigation as easy as possible for the user.
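To make the idea of a structured information base more concrete, the sketch below (not taken from the paper) shows one way such a hierarchy of audio items could be represented: each node holds either a pre-recorded audio file or text intended for a text-to-speech engine, and its children are the options a listener can navigate to. The class InfoNode, the ContentType enum, and all field and method names are hypothetical illustrations, assuming a simple tree traversed by a voice interface.

// Illustrative sketch only: a minimal hierarchical information base whose
// nodes hold either pre-recorded audio or text destined for a TTS engine.
// All names here are hypothetical and do not come from STAIRS itself.
import java.util.ArrayList;
import java.util.List;

public class InfoNode {
    /** How the node's content is rendered to the listener. */
    public enum ContentType { PRERECORDED_AUDIO, TTS_TEXT }

    private final String title;          // short label spoken when the node is reached
    private final ContentType type;
    private final String content;        // file path for audio, or raw text for TTS
    private final List<InfoNode> children = new ArrayList<>();

    public InfoNode(String title, ContentType type, String content) {
        this.title = title;
        this.type = type;
        this.content = content;
    }

    /** Children are the items the user can move to ("next", "previous", "select"). */
    public InfoNode addChild(InfoNode child) {
        children.add(child);
        return this;
    }

    public List<InfoNode> getChildren() { return children; }
    public String getTitle() { return title; }
    public ContentType getType() { return type; }
    public String getContent() { return content; }

    // Tiny demo: a two-level structure as it might be announced to the user.
    public static void main(String[] args) {
        InfoNode root = new InfoNode("Maintenance tasks", ContentType.TTS_TEXT,
                "Select a task to hear its instructions.");
        root.addChild(new InfoNode("Replace filter", ContentType.PRERECORDED_AUDIO,
                "audio/replace_filter.wav"));
        root.addChild(new InfoNode("Check pressure", ContentType.TTS_TEXT,
                "Open the valve and read the gauge."));
        System.out.println(root.getTitle() + " offers " + root.getChildren().size() + " options.");
    }
}

Keeping the hierarchy shallow and giving each node a short spoken title is one plausible way to ease navigation over a purely audio channel, since the listener cannot scan ahead as they would on a screen.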
@InProceedings{schnelle_et_al:DagSemProc.05181.13,
  author    = {Schnelle, Dirk and James, Frankie},
  title     = {{Structured Audio Information Retrieval System}},
  booktitle = {Mobile Computing and Ambient Intelligence: The Challenge of Multimedia},
  pages     = {1--12},
  series    = {Dagstuhl Seminar Proceedings (DagSemProc)},
  ISSN      = {1862-4405},
  year      = {2005},
  volume    = {5181},
  editor    = {Nigel Davies and Thomas Kirste and Heidrun Schumann},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  URL       = {https://drops.dagstuhl.de/entities/document/10.4230/DagSemProc.05181.13},
  URN       = {urn:nbn:de:0030-drops-3700},
  doi       = {10.4230/DagSemProc.05181.13},
  annote    = {Keywords: STAIRS, mobile worker, hands and eyes free, audio, Voice user Interface}
}