Prof. Dr. Danny Schott
Publications
2025

Kunz, M; Schott, D; Wunderling, T; Halloul, M; Hansen, C; Albrecht, A; Braun-Dullaeus, R
Embryonic heart development as an immersive experience: Unveiling learning effects and influential factors in virtual learning environments Journal Article
In: Computers in Biology and Medicine, vol. 187, no. C, 2025, ISSN: 0010-4825.
@article{kunz_embryonic_2025,
title = {Embryonic heart development as an immersive experience: Unveiling learning effects and influential factors in virtual learning environments},
author = {M Kunz and D Schott and T Wunderling and M Halloul and C Hansen and A Albrecht and R Braun-Dullaeus},
url = {https://doi.org/10.1016/j.compbiomed.2024.109638},
doi = {10.1016/j.compbiomed.2024.109638},
issn = {0010-4825},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
journal = {Computers in Biology and Medicine},
volume = {187},
number = {C},
abstract = {As the quality and availability of Virtual Reality (VR) technologies improve, their potential applications in medical education, particularly VR Learning Environments (VRLEs), are increasingly explored. VRLEs offer a dynamic platform where educators and students can interact, access materials, and engage in learning beyond traditional classrooms. However, questions remain about their long-term learning effects and potential confounding factors. This study investigates these aspects through a VR application designed for teaching heart embryology. For this reason we conducted a user study with medical students in their early years of training (N = 143). Our findings reveal significant short-term and sustained learning outcomes two to four weeks following a single VR session. Importantly, these outcomes appear largely independent of users’ technical affinity and are minimally influenced by their immersion tendencies. Instead, the quality of the VRLE and its user experience emerge as critical factors. These results underscore the efficacy of well-designed VRLEs in higher education and highlight key areas for future development.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schreiter, J; Heinrich, F; Hatscher, B; Schott, D; Hansen, C
Multimodal human–computer interaction in interventional radiology and surgery: a systematic literature review Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 4, pp. 807–816, 2025, ISSN: 1861-6429.
@article{schreiter_multimodal_2025,
title = {Multimodal human–computer interaction in interventional radiology and surgery: a systematic literature review},
author = {J Schreiter and F Heinrich and B Hatscher and D Schott and C Hansen},
url = {https://doi.org/10.1007/s11548-024-03263-3},
doi = {10.1007/s11548-024-03263-3},
issn = {1861-6429},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {20},
number = {4},
pages = {807–816},
abstract = {As technology advances, more research dedicated to medical interactive systems emphasizes the integration of touchless and multimodal interaction (MMI). Particularly in surgical and interventional settings, this approach is advantageous because it maintains sterility and promotes a natural interaction. Past reviews have focused on investigating MMI in terms of technology and interaction with robots. However, none has put particular emphasis on analyzing these kinds of interactions for surgical and interventional scenarios.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T; Allgaier, M; Schott, D; Hansen, C; Heinrich, F
Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama, Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{mielke_virtual_2025,
title = {Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction},
author = {T Mielke and M Allgaier and D Schott and C Hansen and F Heinrich},
url = {https://dl.acm.org/doi/10.1145/3706599.3719724},
doi = {10.1145/3706599.3719724},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {ACM},
address = {Yokohama, Japan},
abstract = {Extended Reality (XR) shows potential for human-centered evaluation of real-world scenarios and could improve efficiency and safety in robotic research. However, the validity of XR Human-Robot Interaction (HRI) studies remains underexplored. This paper investigates the transferability of HRI studies across virtualization levels for three tasks. Our results indicate XR study validity is task-specific, with task virtualization as a key influencing factor. Partially virtualized settings with virtual tasks and a real robot, as well as fully virtualized setups with a simulated robot, yielded results comparable to real setups for pick-and-place and robotic ultrasound. However, for precision-dependent peg-in-hole, differences were observed between real and virtualized conditions regarding completion time, perceived workload, and ease. Demonstrating the task dependency of XR transferability and comparing virtualization levels, our work takes an important step in assessing XR study validity. Future work should isolate factors affecting transferability and assess relative validity in the absence of absolute validity.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Heinrich, F; Schott, D; Schwenderling, L; Hansen, C
Do You See What I See? Evaluating Relative Depth Judgments Between Real and Virtual Projections Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{heinrich_you_2025,
title = {Do You See What I See? Evaluating Relative Depth Judgments Between Real and Virtual Projections},
author = {F Heinrich and D Schott and L Schwenderling and C Hansen},
url = {https://doi.org/10.1145/3706599.3720157},
doi = {10.1145/3706599.3720157},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '25},
abstract = {Projector-based augmented reality (AR) is promising in different domains with fewer issues in discomfort or shortage of space. However, due to limitations like high costs and cumbersome calibration, this AR modality remains underused. To address this problem, a stereoscopic projector-based AR simulation was implemented for a cost-effective video see-through AR headset. To evaluate the validity of this simulation, a relative depth judgment experiment was conducted to compare this method with a physical projection system. Consistent results suggest that a known interaction effect between visualization and disparity mode could be successfully reproduced using both the physical projection and the virtual simulation. In addition, first findings indicate that there are no significant differences between these projection modalities. The results indicate that other perception-related effects observed for projector-based AR may also be applicable to virtual projection simulations and that future findings determined using only these simulations may also be applicable to real projections.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Kunz, M; Albrecht, A; Braun-Dullaeus, R; Hansen, C
Too Heart to Handle? Exploring Self-Directed And Collaborative Virtual Learning Environments in Anatomy Education. Journal Article
In: EuroVis 2025 - Dirk Bartz Prize, 2025, (Artwork Size: 5 pages, ISBN: 9783038682813, Publisher: The Eurographics Association).
@article{schott_too_2025,
title = {Too Heart to Handle? Exploring Self-Directed And Collaborative Virtual Learning Environments in Anatomy Education.},
author = {D Schott and M Kunz and A Albrecht and R Braun-Dullaeus and C Hansen},
url = {https://diglib.eg.org/handle/10.2312/evm20251973},
doi = {10.2312/EVM.20251973},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {EuroVis 2025 - Dirk Bartz Prize},
abstract = {The integration of Extended Reality (XR) into medical education represents a transformative shift, particularly in anatomy training, where immersive simulations enhance cognitive engagement and knowledge retention. The developing heart is characterized by rapid morphological changes within a short time frame, which poses a significant pedagogical challenge. Conventional 2D imaging and static models often fail to convey these processes, limiting learners' ability to conceptualize critical spatial relationships, a barrier in understanding congenital anomalies. To address these limitations, this work leverages XR-driven visualization and interaction paradigms to create virtual learning environments. Based on this, we propose methods for designing XR educational modules that adapt to both collaborative and self-directed learning contexts, using embryonic cardiogenesis as an illustrating case study. We present findings from mixed-methods user studies involving a total of 264 students, along with feedback from lecturers, highlighting the importance of an iterative, user-centered design approach.},
note = {Artwork Size: 5 pages
ISBN: 9783038682813
Publisher: The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2024

Chheang, V; Schott, D; Saalfeld, P; Vradelis, L; Huber, T; Huettl, F; Lang, H; Preim, B; Hansen, C
Advanced liver surgery training in collaborative VR environments Journal Article
In: Computers & Graphics, vol. 119, pp. 103879, 2024, ISSN: 0097-8493.
@article{chheang_advanced_2024,
title = {Advanced liver surgery training in collaborative VR environments},
author = {V Chheang and D Schott and P Saalfeld and L Vradelis and T Huber and F Huettl and H Lang and B Preim and C Hansen},
url = {https://www.sciencedirect.com/science/article/pii/S0097849324000050},
doi = {10.1016/j.cag.2024.01.006},
issn = {0097-8493},
year = {2024},
date = {2024-04-01},
urldate = {2024-04-01},
journal = {Computers & Graphics},
volume = {119},
pages = {103879},
abstract = {Virtual surgical training systems are crucial for enabling mental preparation, supporting decision-making, and improving surgical skills. Many virtual surgical training environments focus only on training for a specific medical skill and take place in a single virtual room. However, surgical education and training include the planning of procedures as well as interventions in the operating room context. Moreover, collaboration among surgeons and other medical professionals is only applicable to a limited extent. This work presents a collaborative VR environment similar to a virtual teaching hospital to support surgical training and interprofessional collaboration in a co-located or remote environment. The environment supports photo-realistic avatars and scenarios ranging from planning to training procedures in the virtual operating room. It includes a lobby, a virtual surgical planning room with four surgical planning stations, laparoscopic liver surgery training with the integration of laparoscopic surgical instruments, and medical training scenarios for interprofessional team training in a virtual operating room. Each component was evaluated by domain experts as well as in a series of user studies, providing insights on usability, usefulness, and potential research directions. The proposed environment may serve as a foundation for future medical training simulators.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Kunz, M; Mandel, J; Schwenderling, L; Braun-Dullaeus, R; Hansen, C
An AR-Based Multi-User Learning Environment for Anatomy Seminars Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 949–950, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schott_ar-based_2024,
title = {An AR-Based Multi-User Learning Environment for Anatomy Seminars},
author = {D Schott and M Kunz and J Mandel and L Schwenderling and R Braun-Dullaeus and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536568/},
doi = {10.1109/VRW62533.2024.00271},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {949–950},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Understanding the intricate and rapid changes in shape during embryonic formation is vital for medical students. Using the example of embryonic human heart development, we introduce an AR-based multi-user approach to enhance understanding and foster a participatory learning environment. Through a user-centered approach, we created a prototype accommodating two player roles and enabling multi-modal inputs to encourage dynamic group discussions. We invited four anatomy experts to evaluate three system configurations in an interdisciplinary workshop to assess the feasibility of integration into anatomy seminars. The gathered data and feedback indicate the potential of our collaborative concept for integration into the medical curriculum.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Heinrich, F; Kunz, M; Mandel, J; Albrecht, A; Braun-Dullaeus, R; Hansen, C
CardioCoLab: Collaborative Learning of Embryonic Heart Anatomy in Mixed Reality Journal Article
In: Eurographics Workshop on Visual Computing for Biology and Medicine, 2024, (Artwork Size: 5 pages, Edition: 1191, ISBN: 9783038682448, Publisher: The Eurographics Association).
@article{schott_cardiocolab_2024,
title = {CardioCoLab: Collaborative Learning of Embryonic Heart Anatomy in Mixed Reality},
author = {D Schott and F Heinrich and M Kunz and J Mandel and A Albrecht and R Braun-Dullaeus and C Hansen},
url = {https://diglib.eg.org/handle/10.2312/vcbm20241191},
doi = {10.2312/VCBM.20241191},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Eurographics Workshop on Visual Computing for Biology and Medicine},
abstract = {The complexity of embryonic heart development presents significant challenges for medical education, particularly in illustrating dynamic morphological changes over short time periods. Traditional teaching methods, such as 2D textbook illustrations and static models, are often insufficient for conveying these intricate processes. To address this gap, we developed a multi-user Mixed Reality (MR) system designed to enhance collaborative learning and interaction with virtual heart models. Building on previous research, we identified the needs of both students and teachers, implementing various interaction and visualization features iteratively. An evaluation with teachers and students (N = 12) demonstrated the system's effectiveness in improving engagement and understanding of embryonic heart development. The study highlights the potential of MR in medical seminar settings as a valuable addition to medical education by enhancing traditional learning methods.},
note = {Artwork Size: 5 pages
Edition: 1191
ISBN: 9783038682448
Publisher: The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Wagnerberger, D; Schott, D; Schwenderling, L; Hansen, C; Schumacher, D
Empowering Patients: Improve Gender-Sensitive Medical Knowledge Through Interactive Edutainment Proceedings Article
In: Proceedings of the 13th Nordic Conference on Human-Computer Interaction, pp. 1–12, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0966-1.
@inproceedings{wagnerberger_empowering_2024,
title = {Empowering Patients: Improve Gender-Sensitive Medical Knowledge Through Interactive Edutainment},
author = {D Wagnerberger and D Schott and L Schwenderling and C Hansen and D Schumacher},
url = {https://dl.acm.org/doi/10.1145/3679318.3685500},
doi = {10.1145/3679318.3685500},
isbn = {979-8-4007-0966-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Proceedings of the 13th Nordic Conference on Human-Computer Interaction},
pages = {1–12},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {NordiCHI '24},
abstract = {Disregarding crucial gender-specific differences and potential risks in medicine leads to widespread gender inequalities. This paper introduces interactive edutainment concepts developed through a user-centered design approach to raise awareness of gender medicine. An interactive exhibition course and an accompanying deck of cards provide an engaging and sensitizing experience of medical gender inequalities. Qualitative feedback, self-assessment, and user experience and behavior were evaluated during a public display of the concepts (n=14). The results highlight the potential of our playful approach to raising awareness among the public as well as health-related professionals, paving new ways for communication and empowerment of patients of all genders. We believe these insights have broader applicability across various domains, supporting efforts to address all forms of inequality.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Kunz, M; Heinrich, F; Mandel, J; Albrecht, A; Braun-Dullaeus, R; Hansen, C
Stand Alone or Stay Together: An In-situ Experiment of Mixed-Reality Applications in Embryonic Anatomy Education Proceedings Article
In: Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology, pp. 1–11, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0535-9.
@inproceedings{schott_stand_2024,
title = {Stand Alone or Stay Together: An In-situ Experiment of Mixed-Reality Applications in Embryonic Anatomy Education},
author = {D Schott and M Kunz and F Heinrich and J Mandel and A Albrecht and R Braun-Dullaeus and C Hansen},
url = {https://dl.acm.org/doi/10.1145/3641825.3687706},
doi = {10.1145/3641825.3687706},
isbn = {979-8-4007-0535-9},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology},
pages = {1–11},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {VRST '24},
abstract = {Where traditional media and methods reach their limits in anatomy education, mixed-reality (MR) environments can provide effective learning support because of their high interactivity and spatial visualization capabilities. However, the underlying design and pedagogical requirements are as diverse as the technologies themselves. This paper examines the effectiveness of individual- and collaborative learning environments for anatomy education, using embryonic heart development as an example. Both applications deliver the same content using identical visualizations and hardware but differ in interactivity and pedagogical approach. The environments were evaluated in a user study with medical students (n = 90) during their examination phase, assessing usability, user experience, social interaction/co-presence, cognitive load, and personal preference. Additionally, we conducted a knowledge test before and after an MR learning session to determine educational effects compared to a conventional anatomy seminar. Results indicate that the individual learning environment was generally preferred. However, no significant difference in learning effectiveness could be shown between the conventional approach and the MR applications. This suggests that both can effectively complement traditional seminars despite their different natures. Our study contributes to understanding how different MR settings could be tailored for anatomical education.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2023

Mielke, T; Joeres, F; Schott, D; Hansen, C
Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 501–506, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{mielke_interactive_2023,
title = {Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation},
author = {T Mielke and F Joeres and D Schott and C Hansen},
url = {https://ieeexplore.ieee.org/document/10322246/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00109},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {501–506},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Augmented Reality (AR) visualization has shown potential for supporting intuitive and efficient human-robot interaction in a range of tasks. Since all these tasks are spatially related to the robot, the precise positioning of the AR content is critical to the applicability. However, most research has primarily focused on developing visualizations rather than exploring methods for aligning AR content in the robotic workspace. This paper aims to bridge this gap by implementing and comparing different interactive registration methods, including two point-based and one manual approach. We comparatively evaluated these registration methods in a user study (n=21), measuring registration accuracy, duration, and subjective user feedback. Our results indicate that the point-based methods outperform the manual approach in terms of both accuracy and perceived workload. Furthermore, participants achieved significantly faster performance with a point-based approach using physically defined registration points compared to a point-based approach using markers attached to the robot.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Heinrich, F; Stallmeister, L; Moritz, J; Hensen, B; Hansen, C
Is this the vReal Life? Manipulating Visual Fidelity of Immersive Environments for Medical Task Simulation Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR), pp. 1171–1180, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2838-7.
@inproceedings{schott_is_2023,
title = {Is this the vReal Life? Manipulating Visual Fidelity of Immersive Environments for Medical Task Simulation},
author = {D Schott and F Heinrich and L Stallmeister and J Moritz and B Hensen and C Hansen},
url = {https://ieeexplore.ieee.org/document/10316533/},
doi = {10.1109/ISMAR59233.2023.00134},
isbn = {979-8-3503-2838-7},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
pages = {1171–1180},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Recent developments and research advances contribute to an ever-increasing trend towards quality levels close to what we experience in reality. In this work, we investigate how different degrees of these quality characteristics affect user performance, qualia of user experience (UX), and sense of presence in an example medical task. To this end, a two-way within-subjects design user study was conducted, in which three different levels of visual fidelity were compared. In addition, two different interaction modalities were considered: (1) the use of conventional VR controllers and (2) natural hand interaction using 3D-printed, spatially-registered replicas of medical devices, to interact with their virtual representations. Consistent results indicate that higher degrees of visual fidelity evoke a higher sense of presence and UX. However, user performance was less affected. Moreover, no differences were detected between both interaction modalities for the examined task. Future work should investigate the discovered interaction effects between quality levels and interaction modalities in more detail and examine whether these results can be reproduced in tasks that require more precision. This work provides insights into the implications to consider when studying interactions in VR and paves the way for investigations into early phases of medical product development and workflow analysis.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Moritz, J; Hansen, C; Joeres, F
The UUXR-Framework: A Draft Classification for Using Extended Reality in Usability and User Experience Research Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 460–465, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{schott_uuxr-framework_2023,
title = {The UUXR-Framework: A Draft Classification for Using Extended Reality in Usability and User Experience Research},
author = {D Schott and J Moritz and C Hansen and F Joeres},
url = {https://ieeexplore.ieee.org/document/10322234/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00100},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {460–465},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Conducting human-centered evaluations in extended reality (XR) environments is a growing trend in user research and usability engineering. However, there has been little to no systematic investigation of the emerging methods in this field published to date. The motivation behind our work is to explore and classify strategies and methods for utilizing XR technologies in the context of usability and user experience (UUX) activities. This paper proposes a draft classification framework for the use of XR technologies in UUX activities, combining an informal exploration of relevant literature with established UUX methods. Within this framework, we propose 12 dimensions that we consider potentially relevant for determining whether and how the use of XR technologies can benefit product development and user research. To evaluate the structure and phrasing of our proposed dimensions, we conducted an initial evaluation with UUX professionals (N = 11). We believe that our dimensions form an early-stage foundation for future guidelines aimed at UUX researchers. The framework serves as a tool for assessing different levels of virtualization in UUX work and facilitating knowledge transfer between academia and industry.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Kunz, M; Wunderling, T; Heinrich, F; Braun-Dullaeus, R; Hansen, C
CardioGenesis4D: Interactive Morphological Transitions of Embryonic Heart Development in a Virtual Learning Environment Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 29, no. 5, pp. 2615–2625, 2023, ISSN: 1941-0506.
@article{schott_cardiogenesis4d_2023,
title = {CardioGenesis4D: Interactive Morphological Transitions of Embryonic Heart Development in a Virtual Learning Environment},
author = {D Schott and M Kunz and T Wunderling and F Heinrich and R Braun-Dullaeus and C Hansen},
url = {https://ieeexplore.ieee.org/document/10049681},
doi = {10.1109/TVCG.2023.3247110},
issn = {1941-0506},
year = {2023},
date = {2023-05-01},
urldate = {2023-05-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {29},
number = {5},
pages = {2615–2625},
abstract = {In the embryonic human heart, complex dynamic shape changes take place in a short period of time on a microscopic scale, making this development difficult to visualize. However, spatial understanding of these processes is essential for students and future cardiologists to properly diagnose and treat congenital heart defects. Following a user-centered approach, the most crucial embryological stages were identified and translated into a virtual reality learning environment (VRLE) to enable the understanding of the morphological transitions of these stages through advanced interactions. To address individual learning types, we implemented different features and evaluated the application regarding usability, perceived task load, and sense of presence in a user study. We also assessed spatial awareness and knowledge gain, and finally obtained feedback from domain experts. Overall, students and professionals rated the application positively. To minimize distraction from interactive learning content, such VRLEs should consider features for different learning types, allow for gradual habituation, and at the same time provide enough playful stimuli. Our work previews how VR can be integrated into a cardiac embryology education curriculum.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2022

Schreiter, J; Mielke, T; Schott, D; Thormann, M; Omari, J; Pech, M; Hansen, C
A multimodal user interface for touchless control of robotic ultrasound Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 18, no. 8, pp. 1429–1436, 2022, ISSN: 1861-6429.
@article{schreiter_multimodal_2022,
title = {A multimodal user interface for touchless control of robotic ultrasound},
author = {J Schreiter and T Mielke and D Schott and M Thormann and J Omari and M Pech and C Hansen},
url = {https://link.springer.com/10.1007/s11548-022-02810-0},
doi = {10.1007/s11548-022-02810-0},
issn = {1861-6429},
year = {2022},
date = {2022-12-01},
urldate = {2022-12-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {18},
number = {8},
pages = {1429–1436},
abstract = {Purpose: Past research contained the investigation and development of robotic ultrasound. In this context, interfaces which allow for interaction with the robotic system are of paramount importance. Few researchers have addressed the issue of developing non-tactile interaction approaches, although they could be beneficial for maintaining sterility during medical procedures. Interaction could be supported by multimodality, which has the potential to enable intuitive and natural interaction. To assess the feasibility of multimodal interaction for non-tactile control of a co-located robotic ultrasound system, a novel human–robot interaction concept was developed.
Methods: The medical use case of needle-based interventions under hybrid computed tomography and ultrasound imaging was analyzed by interviewing four radiologists. From the resulting workflow, interaction tasks were derived which include human–robot interaction. Based on this, characteristics of a multimodal, touchless human–robot interface were elaborated, suitable interaction modalities were identified, and a corresponding interface was developed, which was thereafter evaluated in a user study with eight participants.
Results: The implemented interface includes voice commands, combined with hand gesture control for discrete control and navigation interaction of the robotic US probe, respectively. The interaction concept was evaluated by the users in the form of a quantitative questionnaire with an average usability. Qualitative analysis of interview results revealed user satisfaction with the implemented interaction methods and potential improvements to the system.
Conclusion: A multimodal, touchless interaction concept for a robotic US for the use case of needle-based procedures in interventional radiology was developed, incorporating combined voice and hand gesture control. Future steps will include the integration of a solution for the missing haptic feedback and the evaluation of its clinical suitability.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schreiter, J; Schott, D; Schwenderling, L; Hansen, C; Heinrich, F; Joeres, F
AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future Journal Article
In: Journal of Imaging, vol. 8, no. 10, pp. 255, 2022, ISSN: 2313-433X, (Publisher: Multidisciplinary Digital Publishing Institute).
@article{schreiter_ar-supported_2022,
title = {AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future},
author = {J Schreiter and D Schott and L Schwenderling and C Hansen and F Heinrich and F Joeres},
url = {https://www.mdpi.com/2313-433X/8/10/255},
doi = {10.3390/jimaging8100255},
issn = {2313-433X},
year = {2022},
date = {2022-10-01},
urldate = {2022-10-01},
journal = {Journal of Imaging},
volume = {8},
number = {10},
pages = {255},
abstract = {Robotic assistance is applied in orthopedic interventions for pedicle screw placement (PSP). While current robots do not act autonomously, they are expected to have higher autonomy under surgeon supervision in the mid-term. Augmented reality (AR) is promising to support this supervision and to enable human–robot interaction (HRI). To outline a futuristic scenario for robotic PSP, the current workflow was analyzed through literature review and expert discussion. Based on this, a hypothetical workflow of the intervention was developed, which additionally contains the analysis of the necessary information exchange between human and robot. A video see-through AR prototype was designed and implemented. A robotic arm with an orthopedic drill mock-up simulated the robotic assistance. The AR prototype included a user interface to enable HRI. The interface provides data to facilitate understanding of the robot’s “intentions”, e.g., patient-specific CT images, the current workflow phase, or the next planned robot motion. Two-dimensional and three-dimensional visualization illustrated patient-specific medical data and the drilling process. The findings of this work contribute a valuable approach in terms of addressing future clinical needs and highlighting the importance of AR support for HRI.},
note = {Publisher: Multidisciplinary Digital Publishing Institute},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Heinrich, F; Stallmeister, L; Hansen, C
Exploring object and multi-target instrument tracking for AR-guided interventions Journal Article
In: Current Directions in Biomedical Engineering, vol. 8, no. 1, pp. 74–77, 2022, ISSN: 2364-5504, (Publisher: De Gruyter).
@article{schott_exploring_2022,
title = {Exploring object and multi-target instrument tracking for AR-guided interventions},
author = {D Schott and F Heinrich and L Stallmeister and C Hansen},
url = {https://www.degruyterbrill.com/document/doi/10.1515/cdbme-2022-0019/html},
doi = {10.1515/cdbme-2022-0019},
issn = {2364-5504},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
journal = {Current Directions in Biomedical Engineering},
volume = {8},
number = {1},
pages = {74–77},
abstract = {The rapid development of available hard- and software for computer-assisted or augmented reality (AR) guided interventions creates a need for fast and inexpensive prototyping environments. However, intraoperative tracking systems in particular represent a high cost threshold. Therefore, this work presents a low-cost tracking method based on a conventional RGB camera. Here, a combined approach of multiple image targets and 3D object target recognition is implemented. The system is evaluated with a systematic accuracy assessment analyzing a total of 385 3D positions. On average, a deviation of 15.69 ± 9.95 mm was measured. In addition, a prototypical AR-based needle navigation visualization was developed using Microsoft HoloLens 2. This system’s feasibility and usability were evaluated positively in a pilot study (n=3).},
note = {Publisher: De Gruyter},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Heinrich, F; Labsch, D; Hensen, B; Hansen, C
Towards multimodal interaction for needle-based procedures in a virtual radiology suite Journal Article
In: Current Directions in Biomedical Engineering, vol. 8, no. 1, pp. 70–73, 2022, ISSN: 2364-5504, (Publisher: De Gruyter).
@article{schott_towards_2022,
title = {Towards multimodal interaction for needle-based procedures in a virtual radiology suite},
author = {D Schott and F Heinrich and D Labsch and B Hensen and C Hansen},
url = {https://www.degruyterbrill.com/document/doi/10.1515/cdbme-2022-0018/html},
doi = {10.1515/cdbme-2022-0018},
issn = {2364-5504},
year = {2022},
date = {2022-07-01},
urldate = {2022-07-01},
journal = {Current Directions in Biomedical Engineering},
volume = {8},
number = {1},
pages = {70–73},
abstract = {Touchless interaction is popular in the medical domain because it maintains sterility and ensures physicians’ autonomy. Evaluating these technologies, however, proves difficult due to technical and human hurdles. Virtual reality leaves these limitations behind and allows for the exploration of promising concepts by simulating an environment and the interactions that take place within it. We present a virtual radiology suite in the context of needle-based MR-interventions to evaluate touchless interactions. Hand and foot inputs were implemented on a custom interface and evaluated in a user study (n = 16). Results show that activating the system and manipulating values was faster with foot input. However, multimodal interaction is preferable because it is less demanding.},
note = {Publisher: De Gruyter},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Chheang, V; Schott, D; Saalfeld, P; Vradelis, L; Huber, T; Huettl, F; Lang, H; Preim, B; Hansen, C
Towards Virtual Teaching Hospitals for Advanced Surgical Training Proceedings Article
In: 2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 410–414, 2022.
@inproceedings{chheang_towards_2022,
title = {Towards Virtual Teaching Hospitals for Advanced Surgical Training},
author = {V Chheang and D Schott and P Saalfeld and L Vradelis and T Huber and F Huettl and H Lang and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/9757460},
doi = {10.1109/VRW55335.2022.00089},
year = {2022},
date = {2022-03-01},
urldate = {2022-03-01},
booktitle = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {410–414},
abstract = {Existing virtual training environments in medicine usually focus only on the training of a specific medical skill and are conducted in a single virtual room. However, medical challenges are often in the context of the planning procedure and operating room intervention. Therefore, it is crucial to provide a training environment that tackles these issues. In this paper, we present a prototype of a surgical department within a virtual teaching hospital. It supports multiple users, photo-realistic avatars, and training scenarios ranging from planning procedures to laparoscopic surgery in the virtual operating room. Medical data for each virtual patient is synchronized across the different rooms, allowing users to view, analyze and train on key decision points from diagnosis to surgical incision for each patient. The environment was evaluated in a pilot study with five surgical residents and one medical student. The experts assessed the environment as an essential tool to evaluate and improve surgical planning as well as the training during critical situations of the surgical procedures. We conclude that the development of the virtual teaching hospital components described here has the potential to be a basis for future generations of medical training simulators.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2021

Joeres, F; Heinrich, F; Schott, D; Hansen, C
Towards natural 3D interaction for laparoscopic augmented reality registration Journal Article
In: Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization, vol. 9, no. 4, pp. 384–391, 2021, ISSN: 2168-1163, (Publisher: Taylor & Francis _eprint: https://doi.org/10.1080/21681163.2020.1834877).
@article{joeres_towards_2021,
title = {Towards natural 3D interaction for laparoscopic augmented reality registration},
author = {F Joeres and F Heinrich and D Schott and C Hansen},
url = {https://doi.org/10.1080/21681163.2020.1834877},
doi = {10.1080/21681163.2020.1834877},
issn = {2168-1163},
year = {2021},
date = {2021-07-01},
urldate = {2021-07-01},
journal = {Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization},
volume = {9},
number = {4},
pages = {384–391},
abstract = {Augmented reality (AR) is a widely researched route for navigation support in laparoscopic surgery. Accurate registration is a crucial component for such AR systems. We introduce two methods for interactive registration that aim to be minimally invasive to the workflow and to mimic natural manipulation of 3D objects. The methods utilise spatially tracked laparoscopic tools to manipulate the virtual 3D content. We comparatively evaluated the methods against a reference, landmark-based registration method in a user study with 12 participants. We tested the methods for registration accuracy, time, and subjective usability perception. Our methods did not outperform the reference method on these parameters but showed promising results. The results indicate that our methods present no finalised solutions but that one of them is a promising approach for which we identified concrete improvement measures to be implemented in future research.},
note = {Publisher: Taylor & Francis
_eprint: https://doi.org/10.1080/21681163.2020.1834877},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Saalfeld, P; Schmidt, G; Joeres, F; Boedecker, C; Huettl, F; Lang, H; Huber, T; Preim, B; Hansen, C
A VR/AR Environment for Multi-User Liver Anatomy Education Proceedings Article
In: 2021 IEEE Virtual Reality and 3D User Interfaces (VR), pp. 296–305, IEEE, Lisboa, Portugal, 2021, ISBN: 978-1-6654-1838-6.
@inproceedings{schott_vrar_2021,
title = {A VR/AR Environment for Multi-User Liver Anatomy Education},
author = {D Schott and P Saalfeld and G Schmidt and F Joeres and C Boedecker and F Huettl and H Lang and T Huber and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/9417662/},
doi = {10.1109/VR50410.2021.00052},
isbn = {978-1-6654-1838-6},
year = {2021},
date = {2021-03-01},
urldate = {2021-03-01},
booktitle = {2021 IEEE Virtual Reality and 3D User Interfaces (VR)},
pages = {296–305},
publisher = {IEEE},
address = {Lisboa, Portugal},
abstract = {We present a Virtual and Augmented Reality multi-user prototype of a learning environment for liver anatomy education. Our system supports various training scenarios ranging from small learning groups to classroom-size education, where students and teachers can participate in virtual reality, augmented reality, or via desktop PCs. In an iterative development process with surgeons and teachers, a virtual organ library was created. Nineteen liver data sets were used comprising 3D surface models, 2D image data, pathology information, diagnosis and treatment decisions. These data sets can interactively be sorted and investigated individually regarding their volumetric and meta information. The three participation modes were evaluated within a user study with surgery lecturers (5) and medical students (5). We assessed the usability and presence using questionnaires. Additionally, we collected qualitative data with semistructured interviews. A total of 435 individual statements were recorded and summarized to 49 statements. The results show that our prototype is usable, induces presence, and potentially supports the teaching of liver anatomy and surgery in the future.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2020

Schott, D; Hatscher, B; Joeres, F; Gabele, M; Hußlein, S; Hansen, C
Lean-Interaction: passive image manipulation in concurrent multitasking Journal Article
In: 2020.
@article{schott_lean-interaction_2020,
title = {Lean-Interaction: passive image manipulation in concurrent multitasking},
author = {D Schott and B Hatscher and F Joeres and M Gabele and S Hußlein and C Hansen},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
abstract = {Complex bi-manual tasks often benefit from supporting visual information and guidance. Controlling the system that provides this information is a secondary task that forces the user to perform concurrent multitasking, which in turn may affect the main task performance. Interactions based on natural behavior are a promising solution to this challenge. We investigated the performance of these interactions in a hands-free image manipulation task during a primary manual task with an upright stance. Essential tasks were extracted from the example of clinical workflow and turned into an abstract simulation to gain general insights into how different interaction techniques impact the user’s performance and workload. The interaction techniques we compared were full-body movements, facial expression, gesture and speech input. We found that leaning as an interaction technique facilitates significantly faster image manipulation at lower subjective workloads than facial expression. Our results pave the way towards efficient, natural, hands-free interaction in a challenging multitasking environment.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2019

Schott, D
Entwicklung von multimodalen Interaktionstechniken zur handfreien Selektion und Manipulation von medizinischen Bilddaten (Development of multimodal interaction techniques for hands-free selection and manipulation of medical image data) Thesis
2019.
@phdthesis{schott_entwicklung_2019,
title = {Entwicklung von multimodalen Interaktionstechniken zur handfreien Selektion und Manipulation von medizinischen Bilddaten},
author = {D Schott},
url = {https://www.var.ovgu.de/pub/2019_Masterthesis_Schott_geschwaerzt.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}