Hey 🙂 – Meet GenerIO (and Project TeKKI)
Exciting news from GenerIO: we are launching our EU-funded Project TeKKI! Supported by the European Union and the State of NRW, Germany, GenerIO specializes in creating uniquely tailored PBR textures for materials such as wood, brick, and stone. GenerIO is committed to enhancing creative potential in media production and game development through human-centered AI solutions. For more information, visit GenerIO.
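For readers unfamiliar with the term: a PBR (physically based rendering) material is typically delivered as a small set of texture maps that a game engine combines at render time. The sketch below is a minimal, hypothetical illustration in Python of such a texture set; the file-naming scheme and the PBRMaterial class are assumptions made for this example, not GenerIO's actual tooling.

# Minimal sketch (assumption, not GenerIO's tooling or naming scheme):
# a PBR material bundled as its usual set of texture maps, loaded with Pillow.
from dataclasses import dataclass
from PIL import Image

@dataclass
class PBRMaterial:
    albedo: Image.Image             # base color
    normal: Image.Image             # tangent-space surface detail
    roughness: Image.Image          # micro-surface light scattering
    ambient_occlusion: Image.Image  # baked crevice shadowing

def load_pbr_material(prefix: str) -> PBRMaterial:
    """Load e.g. 'wood_albedo.png', 'wood_normal.png', 'wood_roughness.png', 'wood_ao.png'."""
    return PBRMaterial(
        albedo=Image.open(f"{prefix}_albedo.png"),
        normal=Image.open(f"{prefix}_normal.png"),
        roughness=Image.open(f"{prefix}_roughness.png"),
        ambient_occlusion=Image.open(f"{prefix}_ao.png"),
    )

# Usage (assumes the four files exist next to the script):
# wood = load_pbr_material("wood")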
List of publications
2024
Liebers, Carina; Megarajan, Pranav; Auda, Jonas; Stratmann, Tim C; Pfingsthorn, Max; Gruenefeld, Uwe; Schneegass, Stefan
Keep the Human in the Loop: Arguments for Human Assistance in the Synthesis of Simulation Data for Robot Training Journal Article
In: Multimodal Technologies and Interaction, vol. 8, no. 3, pp. 18, 2024.
BibTeX | Tags:
@article{liebers2024keep,
title = {Keep the Human in the Loop: Arguments for Human Assistance in the Synthesis of Simulation Data for Robot Training},
author = {Carina Liebers and Pranav Megarajan and Jonas Auda and Tim C Stratmann and Max Pfingsthorn and Uwe Gruenefeld and Stefan Schneegass},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Multimodal Technologies and Interaction},
volume = {8},
number = {3},
pages = {18},
publisher = {MDPI},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2023
Auda, Jonas; Faltaous, Sarah; Gruenefeld, Uwe; Mayer, Sven; Schneegass, Stefan
The Actuality-Time Continuum: Visualizing Interactions and Transitions Taking Place in Cross-Reality Systems Workshop
Proceedings of the 1st Joint Workshop on Cross Reality, held in conjunction with the IEEE International Symposium on Mixed and Augmented Reality (ISMAR 2023), IEEE, Sydney, Australia, 2023.
@workshop{audaactuality,
title = {The Actuality-Time Continuum: Visualizing Interactions and Transitions Taking Place in Cross-Reality Systems},
author = {Jonas Auda and Sarah Faltaous and Uwe Gruenefeld and Sven Mayer and Stefan Schneegass},
url = {https://www.researchgate.net/publication/374589796_The_Actuality-Time_Continuum_Visualizing_Interactions_and_Transitions_Taking_Place_in_Cross-Reality_Systems},
year = {2023},
date = {2023-10-16},
urldate = {2023-10-16},
booktitle = {Proceedings of the 1st Joint Workshop on Cross Reality, held in conjunction with the IEEE International Symposium on Mixed and Augmented Reality (ISMAR 2023)},
publisher = {IEEE},
address = {Sydney, Australia},
keywords = {},
pubstate = {published},
tppubtype = {workshop}
}
Auda, Jonas; Gruenefeld, Uwe; Faltaous, Sarah; Mayer, Sven; Schneegass, Stefan
A Scoping Survey on Cross-Reality Systems Journal Article
In: ACM Comput. Surv., 2023, ISSN: 0360-0300, (Just Accepted).
Abstract | Links | BibTeX | Tags: Augmented Reality, Augmented Virtuality, Bystander Inclusion, Collaboration, Cross-Reality Systems, Reality-Virtuality Continuum, Transitional Interfaces, Virtual Reality
@article{10.1145/3616536,
title = {A Scoping Survey on Cross-Reality Systems},
author = {Jonas Auda and Uwe Gruenefeld and Sarah Faltaous and Sven Mayer and Stefan Schneegass},
url = {https://doi.org/10.1145/3616536
https://jonasauda.de/wp-content/uploads/2023/09/Auda_CSUR_2023_A_Scoping_Survey_on_Cross-Reality_Systems_Literature_Classification.xlsx},
doi = {10.1145/3616536},
issn = {0360-0300},
year = {2023},
date = {2023-09-01},
urldate = {2023-09-01},
journal = {ACM Comput. Surv.},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
abstract = {Immersive technologies such as Virtual Reality (VR) and Augmented Reality (AR) empower users to experience digital realities. Known as distinct technology classes, the lines between them are becoming increasingly blurry with recent technological advancements. New systems enable users to interact across technology classes or transition between them – referred to as cross-reality systems. Nevertheless, these systems are not well-understood. Hence, in this paper, we conducted a scoping literature review to classify and analyze cross-reality systems proposed in previous work. First, we define these systems by distinguishing three different types. Thereafter, we compile a literature corpus of 306 relevant publications, analyze the proposed systems, and present a comprehensive classification, including research topics, involved environments, and transition types. Based on the gathered literature, we extract nine guiding principles that can inform the development of cross-reality systems. We conclude with research challenges and opportunities.},
note = {Just Accepted},
keywords = {Augmented Reality, Augmented Virtuality, Bystander Inclusion, Collaboration, Cross-Reality Systems, Reality-Virtuality Continuum, Transitional Interfaces, Virtual Reality},
pubstate = {published},
tppubtype = {article}
}
Liebers, Carina; Prochazka, Marvin; Pfützenreuter, Niklas; Liebers, Jonathan; Auda, Jonas; Gruenefeld, Uwe; Schneegass, Stefan
Pointing It Out! Comparing Manual Segmentation of 3D Point Clouds between Desktop, Tablet, and Virtual Reality Journal Article
In: International Journal of Human–Computer Interaction, vol. 0, no. 0, pp. 1-15, 2023.
Links | BibTeX | Tags: Point Clouds, Virtual Reality
@article{doi:10.1080/10447318.2023.2238945,
title = {Pointing It Out! Comparing Manual Segmentation of 3D Point Clouds between Desktop, Tablet, and Virtual Reality},
author = {Carina Liebers and Marvin Prochazka and Niklas Pfützenreuter and Jonathan Liebers and Jonas Auda and Uwe Gruenefeld and Stefan Schneegass},
url = {https://doi.org/10.1080/10447318.2023.2238945},
doi = {10.1080/10447318.2023.2238945},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {International Journal of Human–Computer Interaction},
volume = {0},
number = {0},
pages = {1-15},
publisher = {Taylor & Francis},
keywords = {Point Clouds, Virtual Reality},
pubstate = {published},
tppubtype = {article}
}
2022
Faltaous, Sarah; Prochazka, Marvin; Auda, Jonas; Keppel, Jonas; Wittig, Nick; Gruenefeld, Uwe; Schneegass, Stefan
Give Weight to VR: Manipulating Users’ Perception of Weight in Virtual Reality with Electric Muscle Stimulation Proceedings
2022.
Abstract | Links | BibTeX | Tags: Illusions, Virtual Reality
@proceedings{faltaous2022give_weight_to_vr,
title = {Give Weight to VR: Manipulating Users’ Perception of Weight in Virtual Reality with Electric Muscle Stimulation},
author = {Faltaous, Sarah and Prochazka, Marvin and Auda, Jonas and Keppel, Jonas and Wittig, Nick and Gruenefeld, Uwe and Schneegass, Stefan},
url = {https://doi.org/10.1145/3543758.3547571},
doi = {10.1145/3543758.3547571},
year = {2022},
date = {2022-09-04},
urldate = {2022-09-04},
issue = {Proceedings of Mensch Und Computer 2022},
abstract = {Virtual Reality (VR) devices empower users to experience virtual worlds through rich visual and auditory sensations. However, believable haptic feedback that communicates the physical properties of virtual objects, such as their weight, is still unsolved in VR. The current trend towards hand tracking-based interactions, neglecting the typical controllers, further amplifies this problem. Hence, in this work, we investigate the combination of passive haptics and electric muscle stimulation to manipulate users’ perception of weight, and thus, simulate objects with different weights. In a laboratory user study, we investigate four differing electrode placements, stimulating different muscles, to determine which muscle results in the most potent perception of weight with the highest comfort. We found that actuating the biceps brachii or the triceps brachii muscles increased the weight perception of the users. Our findings lay the foundation for future investigations on weight perception in VR.},
keywords = {Illusions, Virtual Reality},
pubstate = {published},
tppubtype = {proceedings}
}
Auda, Jonas; Gruenefeld, Uwe; Schneegass, Stefan
If The Map Fits! Exploring Minimaps as Distractors from Non-Euclidean Spaces in Virtual Reality Proceedings Article
In: Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems, 2022.
Abstract | Links | BibTeX | Tags: Illusions, Locomotion, Virtual Reality
@inproceedings{auda2022ifthemapfits,
title = {If The Map Fits! Exploring Minimaps as Distractors from Non-Euclidean Spaces in Virtual Reality},
author = {Jonas Auda and Uwe Gruenefeld and Stefan Schneegass},
url = {https://dl.acm.org/doi/10.1145/3491101.3519621},
doi = {10.1145/3491101.3519621},
year = {2022},
date = {2022-04-30},
urldate = {2022-04-30},
booktitle = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems},
series = {Extended Abstracts of the 2022 CHI Conference on Human Factors in Computing Systems},
abstract = {With non-Euclidean spaces, Virtual Reality (VR) experiences can more efficiently exploit the available physical space by using overlapping virtual rooms. However, the illusion created by these spaces can be discovered, if the overlap is too large. Thus, in this work, we investigate if users can be distracted from the overlap by showing a minimap that suggests that there is none. When done correctly, more VR space can be mapped into the existing physical space, allowing for more spacious virtual experiences. Through a user study, we found that participants uncovered the overlap of two virtual rooms when it was at 100% or the overlapping room extended even further. Our results show that the additional minimap renders overlapping virtual rooms more believable and can serve as a helpful tool to use physical space more efficiently for natural locomotion in VR.},
keywords = {Illusions, Locomotion, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
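To make the notion of "overlap" used in this abstract concrete: the fraction of one room's floor plan that coincides with another can be computed from their footprints. A minimal sketch, assuming axis-aligned rectangular rooms; this is an illustration only, not the implementation used in the paper.

def overlap_ratio(room_a, room_b):
    """Fraction of room_b's floor area that lies inside room_a.
    Rooms are axis-aligned rectangles (x_min, z_min, x_max, z_max) in metres."""
    ax0, az0, ax1, az1 = room_a
    bx0, bz0, bx1, bz1 = room_b
    dx = max(0.0, min(ax1, bx1) - max(ax0, bx0))
    dz = max(0.0, min(az1, bz1) - max(az0, bz0))
    return (dx * dz) / ((bx1 - bx0) * (bz1 - bz0))

# Example: room B shares half of its area with room A.
print(overlap_ratio((0, 0, 4, 4), (2, 0, 6, 4)))  # -> 0.5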
Gruenefeld, Uwe; Auda, Jonas; Mathis, Florian; Schneegass, Stefan; Khamis, Mohamed; Gugenheimer, Jan; Mayer, Sven
VRception: Rapid Prototyping of Cross-Reality Systems in Virtual Reality Proceedings Article
In: 🏆Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems, 2022.
Abstract | Links | BibTeX | Tags: Augmented Reality, Augmented Virtuality, Bystander Inclusion, Collaboration, Cross-Reality Systems, Reality-Virtuality Continuum, Transitional Interfaces, Virtual Reality
@inproceedings{gruenefeld2022vrception,
title = {VRception: Rapid Prototyping of Cross-Reality Systems in Virtual Reality},
author = {Uwe Gruenefeld and Jonas Auda and Florian Mathis and Stefan Schneegass and Mohamed Khamis and Jan Gugenheimer and Sven Mayer},
url = {https://dl.acm.org/doi/10.1145/3491102.3501821},
doi = {10.1145/3491102.3501821},
year = {2022},
date = {2022-04-30},
urldate = {2022-04-30},
booktitle = {🏆Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems},
series = {Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems},
abstract = {Cross-reality systems empower users to transition along the reality-virtuality continuum or collaborate with others experiencing different manifestations of it. However, prototyping these systems is challenging, as it requires sophisticated technical skills, time, and often expensive hardware. We present VRception, a concept and toolkit for quick and easy prototyping of cross-reality systems. By simulating all levels of the reality-virtuality continuum entirely in Virtual Reality, our concept overcomes the asynchronicity of realities, eliminating technical obstacles. Our VRception Toolkit leverages this concept to allow rapid prototyping of cross-reality systems and easy remixing of elements from all continuum levels. We replicated six cross-reality papers using our toolkit and presented them to their authors. Interviews with them revealed that our toolkit sufficiently replicates their core functionalities and allows quick iterations. Additionally, remote participants used our toolkit in pairs to collaboratively implement prototypes in about eight minutes that they would have otherwise expected to take days. },
keywords = {Augmented Reality, Augmented Virtuality, Bystander Inclusion, Collaboration, Cross-Reality Systems, Reality-Virtuality Continuum, Transitional Interfaces, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Gruenefeld, Uwe; Kosch, Thomas; Schneegass, Stefan
The Butterfly Effect: Novel Opportunities for Steady-State Visually-Evoked Potential Stimuli in Virtual Reality Proceedings Article
In: Augmented Humans, 2022.
Abstract | Links | BibTeX | Tags: Brain-Computer Interfaces, EEG, SSVEP, Virtual Reality
@inproceedings{auda2022thebutterflyeffect,
title = {The Butterfly Effect: Novel Opportunities for Steady-State Visually-Evoked Potential Stimuli in Virtual Reality},
author = {Jonas Auda and Uwe Gruenefeld and Thomas Kosch and Stefan Schneegass},
url = {https://dl.acm.org/doi/fullHtml/10.1145/3519391.3519397},
doi = {10.1145/3519391.3519397},
year = {2022},
date = {2022-03-13},
urldate = {2022-03-13},
booktitle = {Augmented Humans},
series = {Augmented Humans 2022},
abstract = {In Virtual Reality (VR), Steady-State-Visual Evoked Potentials (SSVEPs) can be used to interact with the virtual environment using brain signals. However, the design of SSVEP-eliciting stimuli often does not match the virtual environment, and thus, disrupts the virtual experience. In this paper, we investigate stimulus designs with varying suitability to blend in virtual environments. Therefore, we created differently-shaped, virtual butterflies. The shapes vary from rectangular wings, over round wings, to a wing shape of a real butterfly. These butterflies elicit SSVEP responses through different animations-flickering or flapping wings. To evaluate our stimuli, we first extracted suitable frequencies for SSVEP responses from the literature. In a first study, we determined three frequencies yielding the best detection accuracy in VR. We used these frequencies in a second study to analyze detection accuracy and appearance ratings using our stimuli designs. Our work contributes insights into the design of SSVEP stimuli that blend into virtual environments and still elicit SSVEP responses.},
keywords = {Brain-Computer Interfaces, EEG, SSVEP, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
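As background for the frequency tagging described in this abstract: an SSVEP stimulus is an on-screen element whose appearance is modulated at a fixed rate, and that rate then reappears in the recorded EEG. Below is a minimal, hypothetical sketch of such a flicker schedule; the 10 Hz target and the 90 Hz refresh rate are illustrative values, not the frequencies selected in the studies above.

import math

def flicker_visible(t_seconds: float, frequency_hz: float = 10.0) -> bool:
    """True while the stimulus (e.g. a butterfly's wings) should be shown: a 50% duty-cycle square wave."""
    return math.sin(2 * math.pi * frequency_hz * t_seconds) >= 0.0

# Sample the first ten frames of a 90 Hz headset refresh.
refresh_hz = 90
print([flicker_visible(frame / refresh_hz) for frame in range(10)])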
Keppel, Jonas; Liebers, Jonathan; Auda, Jonas; Gruenefeld, Uwe; Schneegass, Stefan
ExplAInable Pixels: Investigating One-Pixel Attacks on Deep Learning Models with Explainable Visualizations Proceedings Article
In: Proceedings of the 21st International Conference on Mobile and Ubiquitous Multimedia, pp. 231–242, Association for Computing Machinery, Lisbon, Portugal, 2022, ISBN: 9781450398206.
Abstract | Links | BibTeX | Tags: Deep Learning, Security
@inproceedings{10.1145/3568444.3568469,
title = {ExplAInable Pixels: Investigating One-Pixel Attacks on Deep Learning Models with Explainable Visualizations},
author = {Jonas Keppel and Jonathan Liebers and Jonas Auda and Uwe Gruenefeld and Stefan Schneegass},
url = {https://doi.org/10.1145/3568444.3568469},
doi = {10.1145/3568444.3568469},
isbn = {9781450398206},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the 21st International Conference on Mobile and Ubiquitous Multimedia},
pages = {231–242},
publisher = {Association for Computing Machinery},
address = {Lisbon, Portugal},
series = {MUM '22},
abstract = {Nowadays, deep learning models enable numerous safety-critical applications, such as biometric authentication, medical diagnosis support, and self-driving cars. However, previous studies have frequently demonstrated that these models are attackable through slight modifications of their inputs, so-called adversarial attacks. Hence, researchers proposed investigating examples of these attacks with explainable artificial intelligence to understand them better. In this line, we developed an expert tool to explore adversarial attacks and defenses against them. To demonstrate the capabilities of our visualization tool, we worked with the publicly available CIFAR-10 dataset and generated one-pixel attacks. After that, we conducted an online evaluation with 16 experts. We found that our tool is usable and practical, providing evidence that it can support understanding, explaining, and preventing adversarial examples.},
keywords = {Deep Learning, Security},
pubstate = {published},
tppubtype = {inproceedings}
}
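To make the term used in this abstract concrete: a one-pixel attack replaces exactly one pixel of the input (for CIFAR-10, a 32x32x3 array) and tests whether the classifier's prediction changes. A minimal sketch with NumPy; the predict callable is a stand-in for any image classifier and is an assumption of this example, not the models or the tool from the paper.

import numpy as np

def one_pixel_perturbation(image: np.ndarray, x: int, y: int, rgb) -> np.ndarray:
    """Return a copy of a (32, 32, 3) CIFAR-10-style image with the pixel at (x, y) replaced."""
    attacked = image.copy()
    attacked[y, x] = rgb
    return attacked

def attack_succeeds(predict, image: np.ndarray, x: int, y: int, rgb) -> bool:
    """predict: any callable mapping an image to a class label (assumed, not the paper's models)."""
    return predict(one_pixel_perturbation(image, x, y, rgb)) != predict(image)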
2021
Auda, Jonas; Verheyen, Nils; Mayer, Sven; Schneegass, Stefan
Flyables: Haptic Input Devices for Virtual Reality using Quadcopters Proceedings Article
In: 🏆ACM Symposium on Virtual Reality Software and Technology, 2021.
Abstract | Links | BibTeX | Tags: Drones, Haptics, Toolkit, Virtual Reality
@inproceedings{auda2021flyables,
title = {Flyables: Haptic Input Devices for Virtual Reality using Quadcopters},
author = {Jonas Auda and Nils Verheyen and Sven Mayer and Stefan Schneegass},
url = {https://www.researchgate.net/publication/355437552_Flyables_Haptic_Input_Devices_for_Virtual_Reality_using_Quadcopters
https://www.youtube.com/watch?v=udBv0pOpJZM},
doi = {10.1145/3489849.3489855},
year = {2021},
date = {2021-12-08},
urldate = {2021-12-08},
booktitle = {🏆ACM Symposium on Virtual Reality Software and Technology},
series = {VRST '21},
abstract = {Virtual Reality (VR) has made its way into everyday life. While VR delivers an ever-increasing level of immersion, controls and their haptics are still limited. Current VR headsets come with dedicated controllers that are used to control every virtual interface element. However, the controller input mostly differs from the virtual interface. This reduces immersion. To provide a more realistic input, we present Flyables, a toolkit that provides matching haptics for virtual user interface elements using quadcopters. We took five common virtual UI elements and built their physical counterparts. We attached them to quadcopters to deliver on-demand haptic feedback. In a user study, we compared Flyables to controller-based VR input. While controllers still outperform Flyables in terms of precision and task completion time, we found that Flyables present a more natural and playful way to interact with VR environments. Based on the results from the study, we outline research challenges that could improve interaction with Flyables in the future.},
keywords = {Drones, Haptics, Toolkit, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Heger, Roman; Gruenefeld, Uwe; Schneegass, Stefan
VRSketch: Investigating 2D Sketching in Virtual Reality with Different Levels of Hand and Pen Transparency Proceedings Article
In: IFIP Conference on Human-Computer Interaction, pp. 195–211, Springer, Cham, 2021.
@inproceedings{auda2021vrsketch,
title = {VRSketch: Investigating 2D Sketching in Virtual Reality with Different Levels of Hand and Pen Transparency},
author = {Jonas Auda and Roman Heger and Uwe Gruenefeld and Stefan Schneegass},
url = {https://link.springer.com/chapter/10.1007/978-3-030-85607-6_14},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {IFIP Conference on Human-Computer Interaction},
pages = {195--211},
organization = {Springer, Cham},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Busse, Leon; Pfeuffer, Ken; Gruenefeld, Uwe; Rivu, Radiah; Alt, Florian; Schneegass, Stefan
I’m in Control! Transferring Object Ownership Between Remote Users with Haptic Props in Virtual Reality Proceedings Article
In: Symposium on Spatial User Interaction, pp. 1–10, 2021.
Links | BibTeX | Tags: Collaboration, Haptics, Virtual Reality
@inproceedings{auda2021imincontrol,
title = {I’m in Control! Transferring Object Ownership Between Remote Users with Haptic Props in Virtual Reality},
author = {Jonas Auda and Leon Busse and Ken Pfeuffer and Uwe Gruenefeld and Radiah Rivu and Florian Alt and Stefan Schneegass},
url = {https://dl.acm.org/doi/10.1145/3485279.3485287
https://www.youtube.com/watch?v=FznSmSAtAiY},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Symposium on Spatial User Interaction},
pages = {1--10},
keywords = {Collaboration, Haptics, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Weigel, Martin; Cauchard, Jessica R; Schneegass, Stefan
Understanding Drone Landing on the Human Body Proceedings Article
In: Proceedings of the 23rd International Conference on Mobile Human-Computer Interaction, pp. 1–13, 2021.
@inproceedings{auda2021understanding,
title = {Understanding Drone Landing on the Human Body},
author = {Jonas Auda and Martin Weigel and Jessica R Cauchard and Stefan Schneegass},
url = {https://dl.acm.org/doi/abs/10.1145/3447526.3472031
https://www.youtube.com/watch?v=YO1zkHfC6Gk},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Proceedings of the 23rd International Conference on Mobile Human-Computer Interaction},
pages = {1--13},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Borsum, Florian; Pascher, Max; Auda, Jonas; Schneegass, Stefan; Lux, Gregor; Gerken, Jens
Stay on Course in VR: Comparing the Precision of Movement between Gamepad, Armswinger, and Treadmill Book Section
In: Mensch und Computer 2021, pp. 354–365, ACM, 2021.
Abstract | Links | BibTeX | Tags:
@incollection{borsum2021stay,
title = {Stay on Course in VR: Comparing the Precision of Movement between Gamepad, Armswinger, and Treadmill: Kurs halten in VR: Vergleich der Bewegungspräzision von Gamepad, Armswinger und Laufstall},
author = {Florian Borsum and Max Pascher and Jonas Auda and Stefan Schneegass and Gregor Lux and Jens Gerken},
url = {https://dl.acm.org/doi/abs/10.1145/3473856.3473880},
doi = {10.1145/3473856.3473880},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Mensch und Computer 2021},
pages = {354--365},
publisher = {ACM},
series = {Mensch und Computer 2021},
abstract = {This paper investigates the extent to which different locomotion techniques in virtual reality environments influence the precision of interaction. Three techniques were examined in total: two of them integrate physical activity to create a high degree of realism in the movement (armswinger, treadmill), while a gamepad served as the third technique and as the baseline. In a study with 18 participants, the precision of these three locomotion techniques was examined across six different obstacles in a VR course. The results show that for individual obstacles that require a combination of forward and sideways movement (slalom, cliff) or that target speed (rail), the treadmill enables significantly more precise control than the armswinger. Across the course as a whole, however, no input device is significantly more precise than any other. Using the treadmill also takes significantly more time than the gamepad and the armswinger. Likewise, the goal of reproducing a real walking motion 1:1 is still not achieved even with a treadmill, yet the movement is nevertheless perceived as intuitive and immersive.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Auda, Jonas; Gruenefeld, Uwe; Schneegass, Stefan
Enabling Reusable Haptic Props for Virtual Reality by Hand Displacement Book Section
In: Mensch und Computer 2021, pp. 412–417, 2021.
Links | BibTeX | Tags: Haptics, Illusions, Virtual Reality
@incollection{auda2021enabling,
title = {Enabling Reusable Haptic Props for Virtual Reality by Hand Displacement},
author = {Jonas Auda and Uwe Gruenefeld and Stefan Schneegass},
url = {https://dl.acm.org/doi/fullHtml/10.1145/3473856.3474000},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Mensch und Computer 2021},
pages = {412--417},
keywords = {Haptics, Illusions, Virtual Reality},
pubstate = {published},
tppubtype = {incollection}
}
Liebers, Jonathan; Abdelaziz, Mark; Mecke, Lukas; Saad, Alia; Auda, Jonas; Gruenefeld, Uwe; Alt, Florian; Schneegass, Stefan
Understanding User Identification in Virtual Reality Through Behavioral Biometrics and the Effect of Body Normalization Proceedings Article
In: Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems, pp. 1–11, 2021.
@inproceedings{liebers2021understanding,
title = {Understanding User Identification in Virtual Reality Through Behavioral Biometrics and the Effect of Body Normalization},
author = {Jonathan Liebers and Mark Abdelaziz and Lukas Mecke and Alia Saad and Jonas Auda and Uwe Gruenefeld and Florian Alt and Stefan Schneegass},
url = {https://dl.acm.org/doi/abs/10.1145/3411764.3445528},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems},
pages = {1--11},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2020
Auda, Jonas; Gruenefeld, Uwe; Mayer, Sven
It Takes Two To Tango: Conflicts Between Users on the Reality-Virtuality Continuum and Their Bystanders Workshop
2020.
@workshop{auda2020takes,
title = {It Takes Two To Tango: Conflicts Between Users on the Reality-Virtuality Continuum and Their Bystanders},
author = {Jonas Auda and Uwe Gruenefeld and Sven Mayer},
url = {https://xr.famnit.upr.si/papers/XR_2020_paper_7.pdf},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
keywords = {},
pubstate = {published},
tppubtype = {workshop}
}
Agarwal, Shivam; Auda, Jonas; Schneegaß, Stefan; Beck, Fabian
A Design and Application Space for Visualizing User Sessions of Virtual and Mixed Reality Environments Proceedings
The Eurographics Association, 2020.
@proceedings{agarwal2020design,
title = {A Design and Application Space for Visualizing User Sessions of Virtual and Mixed Reality Environments},
author = {Shivam Agarwal and Jonas Auda and Stefan Schneegaß and Fabian Beck},
url = {http://diglib.eg.org/handle/10.2312/vmv20201194},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
publisher = {The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
Auda, Jonas; Weigel, Martin; Cauchard, Jessica R; Schneegass, Stefan
ProxyDrone: Autonomous Drone Landing on the Human Body. Proceedings Article
In: iHDI@CHI, 2020.
Links | BibTeX | Tags: Drones, Virtual Reality
@inproceedings{auda2020proxydrone,
title = {ProxyDrone: Autonomous Drone Landing on the Human Body.},
author = {Jonas Auda and Martin Weigel and Jessica R Cauchard and Stefan Schneegass},
url = {http://ceur-ws.org/Vol-2617/paper3.pdf},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {iHDI@CHI},
keywords = {Drones, Virtual Reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Heger, Roman; Kosch, Thomas; Gruenefeld, Uwe; Schneegass, Stefan
EasyEG: A 3D-printable Brain-Computer Interface Proceedings Article
In: Adjunct Publication of the 33rd Annual ACM Symposium on User Interface Software and Technology, pp. 70–72, 2020.
@inproceedings{auda2020easyeg,
title = {EasyEG: A 3D-printable Brain-Computer Interface},
author = {Jonas Auda and Roman Heger and Thomas Kosch and Uwe Gruenefeld and Stefan Schneegass},
url = {https://dl.acm.org/doi/abs/10.1145/3379350.3416189},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Adjunct Publication of the 33rd Annual ACM Symposium on User Interface Software and Technology},
pages = {70--72},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Poguntke, Romina; Schneegass, Christina; Van der Vekens, Lucas; Rzayev, Rufat; Auda, Jonas; Schneegass, Stefan; Schmidt, Albrecht
NotiModes: an investigation of notification delay modes and their effects on smartphone users Proceedings Article
In: Proceedings of the Conference on Mensch und Computer, pp. 415–419, 2020.
Abstract | Links | BibTeX | Tags:
@inproceedings{poguntke2020notimodes,
title = {NotiModes: an investigation of notification delay modes and their effects on smartphone users},
author = {Romina Poguntke and Christina Schneegass and Lucas Van der Vekens and Rufat Rzayev and Jonas Auda and Stefan Schneegass and Albrecht Schmidt},
url = {https://dl.acm.org/doi/10.1145/3404983.3410006},
doi = {10.1145/3404983.3410006},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Proceedings of the Conference on Mensch und Computer},
pages = {415--419},
series = {Proceedings of the Conference on Mensch und Computer},
abstract = {Despite the extensive analysis of the consequences of interruptions caused by smartphone notifications, research on the effects on users has so far been sparse. Therefore, in this work we (1) explore concepts on preventing interruptions elicited by notification delay in a focus group; (2) implement a smartphone application manipulating the notification delay in three distinct ways varying in the degree of user-control; (3) evaluate all three concepts with 13 users in a four-week field trial. We thereby gather qualitative feedback in 52 semi-structured interviews, one per participant after each mode and an additional control week. The results show that through the intensive preoccupation with their notification management, users reflect critically about advantages and disadvantages of their continuous reachability. Based on the results from the focus group and field trial, we derive four design implications summarizing the users' experiences and suggestions on notification delay mechanisms.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2019
Pfeiffer, Max; Medrano, Samuel Navas; Auda, Jonas; Schneegass, Stefan
STOP! Enhancing Drone Gesture Interaction with Force Feedback Proceedings Article
In: 1st International Workshop on Human-Drone Interaction, 2019.
BibTeX | Tags:
@inproceedings{pfeiffer2019stop,
title = {STOP! Enhancing Drone Gesture Interaction with Force Feedback},
author = {Max Pfeiffer and Samuel Navas Medrano and Jonas Auda and Stefan Schneegass},
year = {2019},
date = {2019-01-01},
booktitle = {1st International Workshop on Human-Drone Interaction},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Pascher, Max; Schneegass, Stefan
Around the (Virtual) World: Infinite Walking in Virtual Reality Using Electrical Muscle Stimulation Proceedings Article
In: Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems, pp. 1–8, 2019.
@inproceedings{auda2019around,
title = {Around the (Virtual) World: Infinite Walking in Virtual Reality Using Electrical Muscle Stimulation},
author = {Jonas Auda and Max Pascher and Stefan Schneegass},
url = {https://dl.acm.org/doi/abs/10.1145/3290605.3300661
https://youtu.be/c0qCP0xfb4w},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems},
pages = {1--8},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2018
Auda, Jonas; Faltaous, Sarah; Schneegass, Stefan
Control, Intervention, or Autonomy? Understanding the Future of Smart Home. Proceedings Article
In: SmartObjects@ CHI, pp. 39–43, 2018.
BibTeX | Tags:
@inproceedings{auda2018control,
title = {Control, Intervention, or Autonomy? Understanding the Future of Smart Home.},
author = {Jonas Auda and Sarah Faltaous and Stefan Schneegass},
year = {2018},
date = {2018-01-01},
booktitle = {SmartObjects@ CHI},
pages = {39--43},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Antoun, Sara; Auda, Jonas; Schneegass, Stefan
SlidAR: Towards using AR in Education Proceedings Article
In: Proceedings of the 17th International Conference on Mobile and Ubiquitous Multimedia, pp. 491–498, 2018.
Abstract | Links | BibTeX | Tags:
@inproceedings{antoun2018slidar,
title = {SlidAR: Towards using AR in Education},
author = {Sara Antoun and Jonas Auda and Stefan Schneegass},
url = {https://dl.acm.org/doi/10.1145/3282894.3289744},
doi = {10.1145/3282894.3289744},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Proceedings of the 17th International Conference on Mobile and Ubiquitous Multimedia},
pages = {491--498},
series = {Proceedings of the 17th International Conference on Mobile and Ubiquitous Multimedia},
abstract = {Applying Augmented Reality in education is being explored by many scientists. Therefore, we augment digital slides of lectures in higher education. We implemented a web server application, which allows professors to create their own AR slides. We also developed a mobile app for students to scan the slides and view the augmentation in lectures or learning sessions. To assess the usability of our system, we conducted a study with fifteen students and two professors. Students' feedback indicated that our AR app could be integrated into education. Professors, on the other hand, reported improvement suggestions. However, both groups supported applying the system in real lectures.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Hoppe, Matthias; Amiraslanov, Orkhan; Zhou, Bo; Knierim, Pascal; Schneegass, Stefan; Schmidt, Albrecht; Lukowicz, Paul
Lyra: smart wearable in-flight service assistant Proceedings Article
In: Proceedings of the 2018 ACM International Symposium on Wearable Computers, pp. 212–213, 2018.
@inproceedings{auda2018lyra,
title = {Lyra: smart wearable in-flight service assistant},
author = {Jonas Auda and Matthias Hoppe and Orkhan Amiraslanov and Bo Zhou and Pascal Knierim and Stefan Schneegass and Albrecht Schmidt and Paul Lukowicz},
url = {https://dl.acm.org/doi/abs/10.1145/3267242.3267282},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Proceedings of the 2018 ACM International Symposium on Wearable Computers},
pages = {212--213},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Auda, Jonas; Weber, Dominik; Voit, Alexandra; Schneegass, Stefan
Understanding user preferences towards rule-based notification deferral Proceedings Article
In: Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems, pp. 1–6, 2018.
Links | BibTeX | Tags: Notifications, Ubiquitous computing
@inproceedings{auda2018understanding,
title = {Understanding user preferences towards rule-based notification deferral},
author = {Jonas Auda and Dominik Weber and Alexandra Voit and Stefan Schneegass},
url = {https://dl.acm.org/doi/abs/10.1145/3170427.3188688
https://jonasauda.de/wp-content/uploads/2022/12/auda2018understanding.pdf},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems},
pages = {1--6},
keywords = {Notifications, Ubiquitous computing},
pubstate = {published},
tppubtype = {inproceedings}
}
Weber, Dominik; Voit, Alexandra; Auda, Jonas; Schneegass, Stefan; Henze, Niels
Snooze! investigating the user-defined deferral of mobile notifications Proceedings Article
In: Proceedings of the 20th International Conference on Human-Computer Interaction with Mobile Devices and Services, pp. 1–13, 2018.
@inproceedings{weber2018snooze,
title = {Snooze! investigating the user-defined deferral of mobile notifications},
author = {Dominik Weber and Alexandra Voit and Jonas Auda and Stefan Schneegass and Niels Henze},
url = {https://dl.acm.org/doi/abs/10.1145/3229434.3229436},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Proceedings of the 20th International Conference on Human-Computer Interaction with Mobile Devices and Services},
pages = {1--13},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2016
Auda, Jonas
Investigation of delay opportunities of mobile notifications Masters Thesis
2016.
Abstract | Links | BibTeX | Tags:
@mastersthesis{auda2016investigation,
title = {Investigation of delay opportunities of mobile notifications},
author = {Jonas Auda},
url = {https://elib.uni-stuttgart.de/handle/11682/9787},
year = {2016},
date = {2016-01-01},
urldate = {2016-01-01},
abstract = {Notifications and interrupts are closely related. Interruptions might mitigate the performance of their recipient and therefore impede the fulfillment of tasks. Notifications can cause such interruptions. Nowadays, smartphones became ubiquitous. A certain amount of notifications is issued to their users every day. Logically, the user of a smartphone is exposed to a certain amount of interruptions. Previous work focused on identifying opportune moments for the notification delivery at which the adverse effects of an interruption are minimal. Smartphones provide various sensors. The data which these sensors acquire can be used to determine the context of the user. With contextual information more suitable moments for the notification delivery can be found. Snoozing mobile notifications by its recipient was not a popular research target. Motivated to investigate this field we developed a concept for snoozing mobile notifications. Further, we developed a second concept of rules that are capable of acting on incoming notifications automatically. We implemented both concepts in a mobile Android application. Afterward, we conducted two studies to gather data. For the first study, we published our application. We collected snooze behavior data from users all over the world. The second study we conducted was a controlled in-situ user study with 18 participants. 16 participants successfully participated. These participants used an updated version of our application. This version allowed them to create rules that can handle their incoming notifications automatically. The rules were able to either suppress incoming notifications, add them to a notification summary, or snooze them to a particular point in time. Further, we gathered qualitative statements through interviews we conducted at the end of the study. We evaluated the data of both studies and discussed the results. We found that snoozing is used for SMS and instant messaging more often than for other application categories. Further, we found that a rule-based approach can bring benefits to the rule creator.},
keywords = {},
pubstate = {published},
tppubtype = {mastersthesis}
}
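The rule concept summarized in this abstract (suppress a notification, add it to a summary, or snooze it to a later point in time) can be illustrated with a small sketch; the Rule and Notification types and their field names are assumptions made for this example, not the thesis's Android implementation.

from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Literal

Action = Literal["suppress", "summarize", "snooze", "deliver"]

@dataclass
class Notification:
    app: str
    posted_at: datetime

@dataclass
class Rule:
    app: str              # which app the rule applies to
    action: Action        # suppress, add to summary, or snooze
    snooze_minutes: int = 0

def apply_rules(notification: Notification, rules: list) -> tuple:
    """Return (action, deliver_at); the first matching rule wins, otherwise deliver immediately."""
    for rule in rules:
        if rule.app == notification.app:
            if rule.action == "snooze":
                return rule.action, notification.posted_at + timedelta(minutes=rule.snooze_minutes)
            return rule.action, None
    return "deliver", notification.posted_at

# Example: snooze messenger notifications by 30 minutes.
rules = [Rule(app="messenger", action="snooze", snooze_minutes=30)]
print(apply_rules(Notification("messenger", datetime.now()), rules))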
2014
Alt, Florian; Schneegass, Stefan; Auda, Jonas; Rzayev, Rufat; Broy, Nora
Using eye-tracking to support interaction with layered 3D interfaces on stereoscopic displays Proceedings Article
In: Proceedings of the 19th international conference on Intelligent User Interfaces, pp. 267–272, 2014.
@inproceedings{alt2014using,
title = {Using eye-tracking to support interaction with layered 3D interfaces on stereoscopic displays},
author = {Florian Alt and Stefan Schneegass and Jonas Auda and Rufat Rzayev and Nora Broy},
url = {https://dl.acm.org/doi/abs/10.1145/2557500.2557518},
year = {2014},
date = {2014-01-01},
urldate = {2014-01-01},
booktitle = {Proceedings of the 19th international conference on Intelligent User Interfaces},
pages = {267--272},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}