%Encoding: UTF-8
@inproceedings{kiss2020mum,
 abstract = { Augmented Reality (AR) and wearable sensors offer new possibilities to expand our
senses and change how we interact with the world. Sensory augmentation can be integrated
into everyday activities, but controls remain a challenge for user experience. In
this paper, we investigate how users can control a futuristic interface that enables
in-situ magnification. We designed an interactive system to enable users to zoom in
on objects up to a microscopic level and implemented a prototype using the Microsoft
Hololens. In a user-study, we compared full-screen to windowed visualizations and
four interaction techniques for zooming: a clicker, two types of gestures, and voice.
Our results indicate that the clicker enabled users to zoom at the fastest rate and
lowered cognitive load. We also found a preference for windowed views. With our work,
we provide insights for future augmented vision systems.},
 address = {New York, NY, USA},
 author = {Kiss, Francisco and Wo{\'z}niak, Pawe{\l} W. and Biener, Verena and Knierim, Pascal and Schmidt, Albrecht},
 booktitle = {19th International Conference on Mobile and Ubiquitous Multimedia},
 doi = {10.1145/3428361.3428386},
 isbn = {9781450388702},
 keywords = {augmented reality, virtual microscope},
 location = {Essen, Germany},
 numpages = {8},
 pages = {259--266},
 publisher = {Association for Computing Machinery},
 series = {MUM 2020},
 title = {{VUM}: Understanding Requirements for a Virtual Ubiquitous Microscope},
 url = {https://www.unibw.de/usable-security-and-privacy/publikationen/pdf/kiss2020mum.pdf},
 year = {2020}
}

