Dr. Mareen Allgaier
Publications
2025

Mielke, T; Allgaier, M; Hansen, C; Heinrich, F
Extended Reality Check: Evaluating XR Prototyping for Human-Robot Interaction in Contact-Intensive Tasks Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 31, iss. 11, pp. 10035–10044, 2025.
@article{mielke_extended_2025,
title = {Extended Reality Check: Evaluating XR Prototyping for Human-Robot Interaction in Contact-Intensive Tasks},
author = {T Mielke and M Allgaier and C Hansen and F Heinrich},
doi = {10.1109/TVCG.2025.3616753},
year = {2025},
date = {2025-10-02},
urldate = {2025-10-02},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {31},
issue = {11},
pages = {10035--10044},
abstract = {Extended Reality (XR) has the potential to improve efficiency and safety in the user-centered development of human-robot interaction. However, the validity of using XR prototyping for user studies on contact-intensive robotic tasks remains underexplored. These in-contact tasks are particularly relevant due to challenges arising from indirect force perception in robot control. Therefore, in this work, we investigate a representative example of such a task: robotic ultrasound. A user study was conducted to assess the transferability of results from a simulated user study to real-world conditions, comparing two force-assistance approaches. The XR simulation replicates the physical study set-up, employing a virtual robotic arm, its control interface, ultrasound imaging, and two force-assistance methods: automation and force visualization. Our results indicate that while differences in force deviation, perceived workload, and trust emerge between real and simulated setups, the overall findings remain consistent. Specifically, partial automation of robot control improves performance and trust while reducing workload, and visual feedback decreases force deviation in both real and simulated conditions. These findings highlight the potential of XR for comparative studies, even in complex robotic tasks.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T; Allgaier, M; Schott, D; Hansen, C; Heinrich, F
Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama, Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{mielke_virtual_2025,
title = {Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction},
author = {T Mielke and M Allgaier and D Schott and C Hansen and F Heinrich},
url = {https://dl.acm.org/doi/10.1145/3706599.3719724},
doi = {10.1145/3706599.3719724},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1--8},
publisher = {ACM},
address = {Yokohama, Japan},
abstract = {Extended Reality (XR) shows potential for human-centered evaluation of real-world scenarios and could improve efficiency and safety in robotic research. However, the validity of XR Human-Robot Interaction (HRI) studies remains underexplored. This paper investigates the transferability of HRI studies across virtualization levels for three tasks. Our results indicate that XR study validity is task-specific, with task virtualization as a key influencing factor. Partially virtualized settings with virtual tasks and a real robot, as well as fully virtualized setups with a simulated robot, yielded results comparable to real setups for pick-and-place and robotic ultrasound. However, for the precision-dependent peg-in-hole task, differences were observed between real and virtualized conditions regarding completion time, perceived workload, and ease. Demonstrating the task dependency of XR transferability and comparing virtualization levels, our work takes an important step in assessing XR study validity. Future work should isolate factors affecting transferability and assess relative validity in the absence of absolute validity.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Allgaier, M; Dangszat, E; Huettl, F; Hanke, L; Huber, T; Preim, B; Hansen, C
Impact of Input and Output Devices on a Virtual Ultrasound Training Proceedings Article
In: 2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 937–941, IEEE, Saint Malo, France, 2025, ISBN: 979-8-3315-1484-6.
@inproceedings{allgaier_impact_2025,
title = {Impact of Input and Output Devices on a Virtual Ultrasound Training},
author = {M Allgaier and E Dangszat and F Huettl and L Hanke and T Huber and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/10972939/},
doi = {10.1109/VRW66409.2025.00191},
isbn = {979-8-3315-1484-6},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
booktitle = {2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {937--941},
publisher = {IEEE},
address = {Saint Malo, France},
abstract = {Performing ultrasound requires mental skills that have to be trained hands-on. Virtual simulations can provide novice surgeons with a safe training environment before they perform ultrasound on a patient. A wide range of input and output devices is used in existing ultrasound simulations. Because these devices differ in their limitations and benefits regarding realism, cost, and accessibility, we compared three technical setups: desktop with mouse interaction, desktop with a haptic device, and virtual reality with a haptic device. In a user study with 19 participants, we investigated usability and gathered qualitative user feedback in semi-structured interviews. Significant differences in usability and training time were found between the two setups with the haptic device and the desktop setup with mouse interaction. Based on the feedback, the haptic input device seems to be more relevant than the choice between the two output devices in this training case.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

2024

Allgaier, M; Huettl, F; Hanke, L; Huber, T; Preim, B; Saalfeld, S; Hansen, C
Gamification Concepts for a VR-based Visuospatial Training for Intraoperative Liver Ultrasound Proceedings Article
In: Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0331-7.
@inproceedings{allgaier_gamification_2024,
title = {Gamification Concepts for a VR-based Visuospatial Training for Intraoperative Liver Ultrasound},
author = {M Allgaier and F Huettl and L Hanke and T Huber and B Preim and S Saalfeld and C Hansen},
url = {https://doi.org/10.1145/3613905.3650736},
doi = {10.1145/3613905.3650736},
isbn = {979-8-4007-0331-7},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1--8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '24},
abstract = {Gamification is widely used due to its positive influence on learning, adding emotion and steering behavior. In medical VR training applications, the use of gamification is rare, and when it is implemented, it often lacks thoughtful design decisions and empirical evaluation. Using a VR-based training for intraoperative ultrasound in liver surgery, we analyzed game elements regarding their suitability and examined two in more detail: difficulty levels and a kit, where the user has to assemble a virtual liver using ultrasound. In a broad audience study, levels achieved significantly better results regarding enjoyment. Qualitative feedback from medical students directly comparing the elements revealed that they prefer both the kit and the levels for training. Our studies indicate that levels and the more interactive kit improve the learning experience, which could also serve as a basis for similar VR-based medical training applications.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

2023

Allgaier, M; Huettl, F; Hanke, L; Lang, H; Huber, T; Preim, B; Saalfeld, S; Hansen, C
LiVRSono - Virtual Reality Training with Haptics for Intraoperative Ultrasound Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR), pp. 980–989, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2838-7.
@inproceedings{allgaier_livrsono_2023,
title = {LiVRSono - Virtual Reality Training with Haptics for Intraoperative Ultrasound},
author = {M Allgaier and F Huettl and L Hanke and H Lang and T Huber and B Preim and S Saalfeld and C Hansen},
url = {https://ieeexplore.ieee.org/document/10316488/},
doi = {10.1109/ISMAR59233.2023.00114},
isbn = {979-8-3503-2838-7},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
pages = {980--989},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {One of the biggest challenges in using ultrasound (US) is learning to create a spatial mental model of the interior of the scanned object based on the US image and the probe position. As intraoperative ultrasound (IOUS) cannot be easily trained on patients, we present LiVRSono, an immersive VR application to train this skill. The immersive environment, including a US simulation with patient-specific data as well as haptics to support hand-eye coordination, provides a realistic setting. Four clinically relevant training scenarios were identified based on the described learning goal and the workflow of IOUS of the liver. The realism of the setting and the training scenarios was evaluated with eleven physicians, six of whom are experts in IOUS of the liver and five of whom are potential users of the training system. The setting, handling of the US probe, and US image were considered realistic enough for the learning goal. Regarding the haptic feedback, a limitation is the restricted workspace of the input device. Three of the four training scenarios were rated as meaningful and effective. A pilot study on learning outcome shows positive results, especially with respect to confidence and perceived competence. Despite the drawbacks of the input device, our training system provides a realistic learning environment with meaningful scenarios for training the creation of a mental 3D model when performing IOUS. We also identified important improvements to the training scenarios to further enhance the training experience.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

2022

Allgaier, M; Chheang, V; Saalfeld, P; Apilla, V; Huber, T; Huettl, F; Neyazi, B; Sandalcioglu, E; Hansen, C; Preim, B; Saalfeld, S
A comparison of input devices for precise interaction tasks in VR-based surgical planning and training Journal Article
In: Computers in Biology and Medicine, vol. 145, pp. 105429, 2022, ISSN: 0010-4825.
@article{allgaier_comparison_2022,
title = {A comparison of input devices for precise interaction tasks in VR-based surgical planning and training},
author = {M Allgaier and V Chheang and P Saalfeld and V Apilla and T Huber and F Huettl and B Neyazi and E Sandalcioglu and C Hansen and B Preim and S Saalfeld},
url = {https://www.sciencedirect.com/science/article/pii/S0010482522002219},
doi = {10.1016/j.compbiomed.2022.105429},
issn = {0010-4825},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
journal = {Computers in Biology and Medicine},
volume = {145},
pages = {105429},
abstract = {To exploit the potential of virtual reality (VR) in medicine, input devices must be selected carefully due to their different benefits. In this work, input devices for common interaction tasks in medical VR planning and training are compared. Depending on the specific purpose, different requirements exist; therefore, an appropriate trade-off has to be found between meeting task-specific requirements and having a widely applicable device. We focus on two medical use cases, liver surgery planning and craniotomy training, to cover a broad medical domain. Based on these, relevant input devices are compared with respect to their suitability for performing precise VR interaction tasks. The devices are standard VR controllers, the pen-like VR Ink, data gloves, and a real craniotome, the medical instrument used for craniotomy. The input devices were compared quantitatively with respect to their performance based on different measurements. The controllers and the VR Ink performed significantly better than the remaining two devices regarding precision. Qualitative data concerning task load, cybersickness, usability, and appropriateness of the devices were assessed. Although no device stands out for both applications, most participants preferred the VR Ink, followed by the controller, and finally the data gloves and craniotome. These results can guide the selection of an appropriate device for future medical VR applications.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}