|
|
@inproceedings {INPROC-2007-72, author = {Mike Eissele and Thomas Ertl}, title = {{Mobile Navigation and Augmentation utilizing Real-World Text}}, booktitle = {Proceedings of Mensch und Computer 2007, Workshop on Nomadic and Wearable Computing 2007}, publisher = {-}, institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany}, type = {Konferenz-Beitrag}, month = {November}, year = {2007}, language = {Englisch}, cr-category = {I.3 Computer Graphics}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {Mobile, smart devices are already available in everyday life. Most of them
offer a rich feature set, including camera, GPS antenna, or even WIFI modules.
Navigation software utilizing GPS to acquire location data became very popular
on these mobile clients. However, due to hardware restrictions GPS positioning
cannot be used for indoor scenarios. In contrast, this paper is focused on
navigation for indoor environments targeting on a low-cost solution, without
the requirement of additional hardware installations or markers inside
buildings. A server-based optical character recognition service is used to map
images containing unique text passages -- acquired with a mobile client -- to
additional meta data, e.g. locations or orientation. We also show that
augmented-reality methods can be used to provide an intuitive presentation and
interaction system by overlaying real-world images with additional information
like navigation symbols or internet hyperlinks.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2007-72&engl=0} }
@inproceedings {INPROC-2007-71, author = {Martin Rotard and Mike Eissele and Raoul van Putten and Thomas Ertl}, title = {{Zoomable User Interfaces in Scalable Vector Graphics}}, booktitle = {Proceedings of Open SVG 2007}, publisher = {Online}, institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany}, type = {Konferenz-Beitrag}, month = {November}, year = {2007}, language = {Englisch}, cr-category = {I.3.3 Picture/Image Generation}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {Zoomable user interfaces are an evolutionary outgrowth of graphical user
interfaces. In this paper we propose a zoomable user interface based on
Scalable Vector Graphics. Three-level zooming is proposed as a new paradigm to
combine different zooming functionalities in a common interface and support
zooming within the window manager. This helps to unify zooming techniques of
different applications. To meet the demand of efficient and easy navigation on
a user interface, several novel interaction techniques are shown that further
support the integration of three-level zooming within the underlying
presentation system. For mobile small-screen devices, where the benefit of
zooming user interfaces is even higher, the proposed system can be operated
with simple pen tap or tap and drag operations. We also present a prototypical
implementation, which demonstrates how applications based on the SPARK toolkit
can transparently benefit from the proposed technology.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2007-71&engl=0} }
@inproceedings {INPROC-2007-70, author = {Martin Krau{\ss} and Mike Eissele and Magnus Strengert}, title = {{GPU-Based Edge-Directed Image Interpolation}}, booktitle = {Image Analysis (Proceedings of SCIA 2007)}, publisher = {Springer-Verlag}, institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany}, series = {Lecture Notes in Computer Science}, volume = {4522}, pages = {532--541}, type = {Konferenz-Beitrag}, month = {November}, year = {2007}, keywords = {Zoom, GPU, HDTV}, language = {Englisch}, cr-category = {I.3.3 Picture/Image Generation}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {The rendering of lower resolution image data on higher resolution displays has
become a very common task, in particular because of the increasing popularity
of webcams, camera phones, and low-bandwidth video streaming. Thus, there is a
strong demand for real-time, high-quality image magnification. In this work, we
suggest to exploit the high performance of programmable graphics processing
units (GPUs) for an adaptive image magnification method. To this end, we
propose a GPU-friendly algorithm for image up-sampling by edge-directed image
interpolation, which avoids ringing artifacts, excessive blurring, and
staircasing of oblique edges. At the same time it features gray-scale
invariance, is applicable to color images, and allows for real-time processing
of full-screen images on today's GPUs.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2007-70&engl=0} }
@inproceedings {INPROC-2007-124, author = {Andreas Hub and Stefan Kombrink and Klaus Bosse and Thomas Ertl}, title = {{Conference Navigation and Communication Assistant for the Deafblind based on Tactile and Acoustically Amplified Augmented Map Information for the 14th Deafblind International World Conference}}, booktitle = {Proceedings of the 14th Deafblind International World Conference (DbI 2007); Perth, Australia, September 25-30, 2007}, publisher = {Online}, institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany}, pages = {1--2}, type = {Konferenz-Beitrag}, month = {September}, year = {2007}, keywords = {Blind Navigation; Blind Users; Impaired Vision}, language = {Englisch}, cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,
K.4.2 Computers and Society Social Issues}, ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2007-124/INPROC-2007-124.pdf}, contact = {andreas.hub@vis.uni-stuttgart.de}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {We have developed a portable electronic navigation assistant for the deafblind
that facilitates independent navigation even in new and complex environments,
such as large conference sites. The device includes a keyboard, loudspeaker and
small Braille display, allowing deafblind users to communicate with anyone
capable of typing.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2007-124&engl=0} }
@inproceedings {INPROC-2007-123, author = {Andreas Hub and Stefan Kombrink and Klaus Bosse and Thomas Ertl}, title = {{TANIA -- A Tactile-Acoustical Navigation and Information Assistant for the 2007 CSUN Conference}}, booktitle = {Proceedings of the California State University, Northridge Center on Disabilities' 22nd Annual International Technology and Persons with Disabilities Conference (CSUN 2007); March 19-24; Los Angeles, CA, USA}, address = {Los Angeles}, publisher = {Online}, institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany}, pages = {1--4}, type = {Konferenz-Beitrag}, month = {M{\"a}rz}, year = {2007}, keywords = {Indoor Navigation; Blind Users; Impaired Vision; Mobile Computing}, language = {Englisch}, cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,
K.4.2 Computers and Society Social Issues}, ee = {ftp://ftp.informatik.uni-stuttgart.de/pub/library/ncstrl.ustuttgart_fi/INPROC-2007-123/INPROC-2007-123.pdf}, contact = {andreas.hub@vis.uni-stuttgart.de}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {A navigation assistant based on a tactile-acoustical interface and augmented
map information is presented, affording blind people real and virtual
explorations of the 2007 CSUN Conference environment. By tapping on a touch
screen, hotel layout and conference-related data are provided.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2007-123&engl=0} }
@inproceedings {INPROC-2007-122, author = {M. Kraus and M. Eissele and M. Strengert}, title = {{GPU-Based Edge Directed Image Interpolation}}, booktitle = {Image Analysis (Proceedings of SCIA 2007)}, publisher = {Springer-Verlag}, institution = {Universit{\"a}t Stuttgart : Sonderforschungsbereich SFB 627 (Nexus: Umgebungsmodelle f{\"u}r mobile kontextbezogene Systeme), Germany}, series = {Lecture Notes in Computer Science}, pages = {532--541}, type = {Konferenz-Beitrag}, month = {Januar}, year = {2007}, language = {Englisch}, cr-category = {I.3 Computer Graphics}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {The rendering of lower resolution image data on higher resolution displays has
become a very common task, in particular because of the increasing popularity
of webcams, camera phones, and low-bandwidth video streaming. Thus, there is a
strong demand for real-time, high-quality image magnification. In this work, we
suggest to exploit the high performance of programmable graphics processing
units (GPUs) for an adaptive image magnification method. To this end, we
propose a GPU-friendly algorithm for image up-sampling by edge-directed image
interpolation, which avoids ringing artifacts, excessive blurring, and
staircasing of oblique edges. At the same time it features gray-scale
invariance, is applicable to color images, and allows for real-time processing
of full-screen images on today's GPUs.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2007-122&engl=0} }
@article {ART-2007-20, author = {Andreas Hub and Tim Hartter and Stefan Kombrink and Thomas Ertl}, title = {{Real and virtual explorations of the environment and interactive tracking of movable objects for the blind on the basis of tactile-acoustical maps and 3D environment models}}, journal = {Disability and Rehabilitation: Assistive Technology}, address = {London}, publisher = {Informa Healthcare}, volume = {3}, number = {1}, pages = {57--68}, type = {Artikel in Zeitschrift}, month = {Mai}, year = {2007}, doi = {10.1080/17483100701275677}, language = {Englisch}, cr-category = {H.5.2 Information Interfaces and Presentation User Interfaces,
K.4.2 Computers and Society Social Issues}, ee = {http://dx.doi.org/10.1080/17483100701275677}, contact = {andreas.hub@vis.uni-stuttgart.de}, department = {Universit{\"a}t Stuttgart, Institut f{\"u}r Visualisierung und Interaktive Systeme, Visualisierung und Interaktive Systeme}, abstract = {This study describes the development of a multi-functional assistant system for
the blind which combines localisation, real and virtual navigation within
modelled environments and the identification and tracking of fixed and movable
objects.}, url = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=ART-2007-20&engl=0} }
|
|