
Visualization and Interactive Systems Group, Universität Stuttgart: Publications

2008 Bibliography (BibTeX)

@inproceedings {INPROC-2008-48,
   author = {Mike Eissele and Matthias Kreiser and Thomas Ertl},
   title = {{Context-Controlled Flow Visualization in Augmented Reality}},
   booktitle = {ACM International Conference Proceeding Series},
   publisher = {ACM Press},
   institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany},
   pages = {89--96},
   type = {Konferenz-Beitrag},
   month = {Juni},
   year = {2008},
   keywords = {Context-aware; augmented reality; visualization; flow},
   language = {Englisch},
   cr-category = {I.3.7 Three-Dimensional Graphics and Realism,     I.3.6 Computer Graphics Methodology and Techniques},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {A major challenge of novel scientific visualization using Augmented Reality is the accuracy of the user/camera position tracking. Many alternative techniques have been proposed, but still there is no general solution. Therefore, this paper presents a system that copes with different conditions and makes use of context information, e.g., available tracking quality, to select adequate Augmented Reality visualization methods. This way, users will automatically benefit from high-quality visualizations if the system can estimate the pose of the real-world camera accurately enough. Otherwise, specially-designed alternative visualization techniques which require a less accurate positioning are used for the augmentation of real-world views. The proposed system makes use of multiple tracking systems and a simple estimation of the currently available overall accuracy of the pose estimation, used as context information to control the resulting visualization. Results of a prototypical implementation for visualization of 3D scientific flow data are presented to show the practicality.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2008-48&engl=0}
}
@inproceedings {INPROC-2008-130,
   author = {Andreas Hub},
   title = {{Precise Indoor and Outdoor Navigation for the Blind and Visually Impaired Using Augmented Maps and the TANIA System}},
   booktitle = {Proceedings of the 9th International Conference on Low Vision (Vision 2008); July 7-11, Montreal, Canada, 2008},
   publisher = {Online},
   institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany},
   pages = {1--4},
   type = {Konferenz-Beitrag},
   month = {Juli},
   year = {2008},
   keywords = {Blind Navigation; Blind Users; Impaired Vision},
   language = {Englisch},
   cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,     K.4.2 Computers and Society Social Issues},
   ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2008-130/INPROC-2008-130.pdf,     http://www.opto.umontreal.ca/vision2008/},
   contact = {andreas.hub@vis.uni-stuttgart.de},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {The use of a small, portable Tactile Acoustical Navigation and Information Assistant (TANIA) by ambulatory blind people in complex environments is presented. TANIA utilizes an inertial sensor, tablet computer, and enhanced mapping to provide precise navigation with up to one-step accuracy. Its operation is relatively simple, even for elderly people with no computer experience. Previously-installed beacon or tag infrastructure is not required, which expands environmental access for blind users to any area where adequate digital mapping has been done. Current development in pilot locations is described, including examples of how maps are augmented with specific, location-based information. Such data can be presented to the user acoustically or in Braille. Given the ever-increasing availability of global positioning and information services, systems such as TANIA suggest the potential for independent and precise worldwide navigation by blind people.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2008-130&engl=0}
}
@inproceedings {INPROC-2008-129,
   author = {Andreas Hub},
   title = {{Map Requirements and Attainable Public Policy for an Installation-free Worldwide Navigation and Information System for the Blind}},
   booktitle = {Proceedings of the 9th International Conference on Low Vision (Vision 2008); July 7-11, Montreal, Canada, 2008},
   publisher = {Online},
   institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany},
   pages = {1--4},
   type = {Konferenz-Beitrag},
   month = {Juli},
   year = {2008},
   keywords = {Blind Navigation; Blind Users; Impaired Vision},
   language = {Englisch},
   cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,     K.4.2 Computers and Society Social Issues},
   ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2008-129/INPROC-2008-129.pdf,     http://www.opto.umontreal.ca/vision2008/},
   contact = {andreas.hub@vis.uni-stuttgart.de},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {The aim of this work is to demonstrate that safe and independent mobility is feasible for blind pedestrians worldwide, requiring only adequate mapping and attainable public policy. A basic map format and an installation-free guidance and information system are presented. The developed map format can be used as the basis for a worldwide navigation system for the blind. In order to achieve such expanded accessibility, however, certain public policy changes are necessary. National and local organizations of blind and deafblind people must educate themselves and their supporters about the need for, and benefits of, detailed mapping of buildings and cities. They must take the lead in raising public awareness and in lobbying institutions and cities to offer maps in formats which support safe and independent navigation.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2008-129&engl=0}
}
@inproceedings {INPROC-2008-128,
   author = {Andreas Hub},
   title = {{Integration of Active Tactile Control Braille Technology into Portable Navigation and Object Recognition Systems for the Blind and Deafblind}},
   booktitle = {Proceedings of the 9th International Conference on Low Vision (Vision 2008); July 7-11, Montreal, Canada, 2008},
   publisher = {Online},
   institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany},
   pages = {1--3},
   type = {Konferenz-Beitrag},
   month = {Juli},
   year = {2008},
   keywords = {Blind Navigation, Blind Users, Impaired Vision},
   language = {Englisch},
   cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,     K.4.2 Computers and Society Social Issues},
   ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2008-128/INPROC-2008-128.pdf,     http://www.opto.umontreal.ca/vision2008/},
   contact = {andreas.hub@vis.uni-stuttgart.de},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {A small Braille display with Active Tactile Control (ATC) Braille technology was developed in association with the Handy Tech Company. With ATC technology text information can be read without pressing a scrolling button, as the system automatically shifts to the next line when the reading finger is detected at the end of the last word. This Braille display was connected to two compatible systems. The first is the TANIA (Tactile Acoustical Navigation and Information Assistant) navigation system, based on detailed maps, a movement sensor, and the Global Positioning System (GPS). The second is an object recognition system, which uses 3D environmental models, a movement sensor and stereo camera. Either system, or both in combination, provide information acoustically or in Braille. Blind and deafblind users report that the use of the portable ATC display technology combined with these systems represents an additional step toward increasing independent orientation and mobility.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2008-128&engl=0}
}
@inproceedings {INPROC-2008-127,
   author = {Andreas Hub},
   title = {{Guiding Grids in Augmented Maps for Precise Installation-Free Worldwide Blind Navigation}},
   booktitle = {Conference Proceedings of the California State University, Northridge Center on Disabilities' 23rd Annual International Technology and Persons with Disabilities Conference (CSUN 2008); March 19-24; Los Angeles, CA, USA},
   publisher = {Online},
   institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany},
   pages = {1--4},
   type = {Konferenz-Beitrag},
   month = {M{\"a}rz},
   year = {2008},
   keywords = {Blind Navigation; Blind Users; Impaired Vision},
   language = {Englisch},
   cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,     K.4.2 Computers and Society Social Issues},
   ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2008-127/INPROC-2008-127.pdf},
   contact = {andreas.hub@vis.uni-stuttgart.de},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {Guiding grids were integrated into the TANIA system’s maps. Precise navigation support is provided indoors and outdoors even in large open spaces without physical guidelines.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2008-127&engl=0}
}
@inproceedings {INPROC-2008-125,
   author = {M. Moser and D. Weiskopf},
   title = {{Interactive Volume Rendering on Mobile Devices}},
   booktitle = {Vision, Modeling, and Visualization VMV '08 Conference Proceedings},
   publisher = {Akademische Verlagsgesellschaft AKA GmbH},
   institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany},
   pages = {217--226},
   type = {Konferenz-Beitrag},
   month = {Dezember},
   year = {2008},
   language = {Englisch},
   cr-category = {I.3.7 Three-Dimensional Graphics and Realism},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {A rendering technique for interactive direct volume visualization on mobile devices is presented. Utilizing basic graphics hardware functionality such as rasterization and 2D texture mapping, native volume rendering is possible by adopting and extending the 2D texture-slicing approach. Limitations of mobile graphics devices are discussed, in particular concerning graphics performance and available functionality. These limitations lead to modifications of the traditional texture-based volume-rendering algorithm: we propose a screen-adaptive hybrid low/high-resolution rendering technique that achieves a good compromise between image quality and interactivity; furthermore, compressed and paletted texture formats are explored for fast texture update, for example, during interactive specification of the transfer function. The implementation of the rendering algorithm is based on OpenGL ES 1.0 and was tested on the Dell Axim X50v/X51v PDA. Performance characteristics are documented through several kinds of measurements.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2008-125&engl=0}
}
@inbook {INBOOK-2008-12,
   author = {Andreas Hub},
   title = {{Taktil-Akustische Navigationsunterst{\"u}tzung f{\"u}r {\"a}ltere blinde und sehbehinderte Menschen auf der Basis erweiterter Umgebungsmodelle}},
   series = {UDay VI – Seniorengerechte Schnittstellen zur Technik},
   address = {Lengerich},
   publisher = {Pabst Science Publishers},
   pages = {104--107},
   type = {Beitrag in Buch},
   month = {April},
   year = {2008},
   isbn = {978-3-89967-467-5},
   keywords = {Navigation; Blind},
   language = {Deutsch},
   cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,     K.4.2 Computers and Society Social Issues},
   contact = {andreas.hub@vis.uni-stuttgart.de},
   department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme},
   abstract = {Mit dem aktuellen Prototyp eines portablen Taktil-Akustischen Navigations- und Informations-Assistenten (TANIA) wurden Gebrauchstests mit {\"a}lteren blinden und hochgradig sehbehinderten Personen unter allt{\"a}glichen Bedingungen durchgef{\"u}hrt. Erste Ergebnisse und Reaktionen der Benutzer deuten darauf hin, dass auch {\"a}ltere Personen mit sensorischen Behinderungen den Umgang mit innovativen Assistenzsystemen erlernen und von deren Einsatz profitieren k{\"o}nnen.},
   url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INBOOK-2008-12&engl=0}
}