Prof. Dr. Christian Hansen
Short Bio
I am a Full Professor (W3) of Virtual and Augmented Reality at the Faculty of Computer Science and the Research Campus STIMULATE at Otto von Guericke University Magdeburg, Germany.
I studied Computational Visualistics at Otto von Guericke University Magdeburg, completing my bachelor’s research at the Aristotle University of Thessaloniki in 2004 and my master’s thesis at the Center for Medical Diagnostic Systems and Visualization at the University of Bremen in 2006. After graduating, I joined the Fraunhofer Institute for Digital Medicine MEVIS in Bremen as a research scientist (2006–2013) and received my PhD in Computer Science from Jacobs University Bremen in 2012.
In 2013, I was appointed Junior Professor of Computer-Assisted Surgery at Otto von Guericke University Magdeburg. I held a DFG-funded research fellowship at Harvard Medical School from 2017 to 2018. From 2020 to 2024, I served as Extraordinary Professor of Virtual and Augmented Reality before being promoted to Full Professor in 2024.
Research Interests
My research focuses on virtual and augmented reality, human–machine interaction, and medical visualization. In particular, I investigate immersive, interaction-driven systems for interventional radiology, liver surgery, and urology. My work aims to integrate advanced image analysis, efficient user interfaces, and workflow-aware solutions into clinical environments.
You can also find me here:
Google Scholar |
ORCID |
ResearchGate |
LinkedIn
Publications
2025

Mielke, T; Allgaier, M; Hansen, C; Heinrich, F
Extended Reality Check: Evaluating XR Prototyping for Human-Robot Interaction in Contact-Intensive Tasks Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 31, iss. 11, pp. 10035–10044, 2025.
@article{mielke_extended_2025,
title = {Extended Reality Check: Evaluating XR Prototyping for Human-Robot Interaction in Contact-Intensive Tasks},
author = {T Mielke and M Allgaier and C Hansen and F Heinrich},
doi = {10.1109/TVCG.2025.3616753},
year = {2025},
date = {2025-10-02},
urldate = {2025-10-02},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {31},
issue = {11},
pages = {10035–10044},
abstract = {Extended Reality (XR) has the potential to improve efficiency and safety in the user-centered development of human-robot interaction. However, the validity of using XR prototyping for user studies for contact-intensive robotic tasks remains underexplored. These in-contact tasks are particularly relevant due to challenges arising from indirect force perception in robot control. Therefore, in this work, we investigate a representative example of such a task: robotic ultrasound. A user study was conducted to assess the transferability of results from a simulated user study to real-world conditions, comparing two force-assistance approaches. The XR simulation replicates the physical study set-up employing a virtual robotic arm, its control interface, ultrasound imaging, and two force-assistance methods: automation and force visualization. Our results indicate that while differences in force deviation, perceived workload, and trust emerge between real and simulated setups, the overall findings remain consistent. Specifically, partial automation of robot control improves performance and trust while reducing workload, and visual feedback decreases force deviation in both real and simulated conditions. These findings highlight the potential of XR for comparative studies, even in complex robotic tasks.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T; Heinrich, F; Hansen, C
Gesturing Towards Efficient Robot Control: Exploring Sensor Placement and Control Modes for Mid-Air Human-Robot Interaction Proceedings Article
In: 2025 IEEE International Conference on Robotics and Automation (ICRA), 2025.
@inproceedings{mielke_gesturing_2025,
title = {Gesturing Towards Efficient Robot Control: Exploring Sensor Placement and Control Modes for Mid-Air Human-Robot Interaction},
author = {T Mielke and F Heinrich and C Hansen},
doi = {10.1109/ICRA55743.2025.11127519},
year = {2025},
date = {2025-09-02},
urldate = {2025-01-01},
booktitle = {2025 IEEE International Conference on Robotics and Automation (ICRA)},
abstract = {While collaborative robots effectively combine robotic precision with human capabilities, traditional control methods such as button presses or hand guidance can be slow and physically demanding. This has led to an increasing interest in natural user interfaces that integrate hand gesture-based interactions for more intuitive and flexible robot control. Therefore, this paper systematically explores mid-air robot control by comparing position and rate control modes with different state-of-the-art and novel sensor placements. A user study was conducted to evaluate each combination in terms of accuracy, task duration, perceived workload, and physical exertion. Our results indicate that position control is more efficient than rate control. Traditional desk-mounted sensors can provide a good balance between accuracy and comfort. However, robot-mounted sensors are a viable alternative for short-term, accurate control with less spatial requirements. Leg-mounted sensors, while comfortable, pose challenges to hand-eye coordination. Based on these findings, we provide design implications for improving the usability and comfort of mid-air human-robot interaction. Future research should extend this evaluation to a wider range of tasks and environments.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
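To make the two control modes compared above concrete, the following is a minimal, illustrative sketch (not the authors' implementation) of how a tracked hand pose could drive a robot end-effector target under position control versus rate control; the gain, deadzone, and 50 Hz update rate are placeholder assumptions.

import numpy as np

def position_control(hand_pos, origin, scale=1.0):
    # Position control: the end-effector target mirrors the hand pose directly,
    # offset from a reference origin and optionally scaled.
    return origin + scale * hand_pos

def rate_control(hand_pos, neutral_pos, current_target, gain=0.5, dt=0.02, deadzone=0.01):
    # Rate control: hand displacement from a neutral pose sets a velocity
    # that is integrated over time; a deadzone suppresses tracking jitter.
    displacement = hand_pos - neutral_pos
    displacement = np.where(np.abs(displacement) < deadzone, 0.0, displacement)
    return current_target + gain * displacement * dt

if __name__ == "__main__":
    hand = np.array([0.10, 0.02, 0.30])   # hypothetical tracked hand position (m)
    target = position_control(hand, origin=np.zeros(3))
    for _ in range(50):                    # one second of rate control at 50 Hz
        target = rate_control(hand, neutral_pos=np.zeros(3), current_target=target)
    print(target)

Under position control the target jumps directly to the scaled hand pose, whereas under rate control a sustained offset produces a gradual drift toward the goal; the study above compares these two mappings empirically.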

Mielke, T; Heinrich, F; Hansen, C
Enhancing AR-to-Robot Registration Accuracy: A Comparative Study of Marker Detection Algorithms and Registration Parameters Proceedings Article
In: 2025 IEEE International Conference on Robotics and Automation (ICRA), IEEE, 2025.
@inproceedings{mielke_enhancing_2025,
title = {Enhancing AR-to-Robot Registration Accuracy: A Comparative Study of Marker Detection Algorithms and Registration Parameters},
author = {T Mielke and F Heinrich and C Hansen},
doi = {10.1109/ICRA55743.2025.11128039},
year = {2025},
date = {2025-09-02},
urldate = {2025-09-02},
booktitle = {2025 IEEE International Conference on Robotics and Automation (ICRA)},
publisher = {IEEE},
abstract = {Augmented Reality (AR) offers potential for enhancing human-robot collaboration by enabling intuitive interaction and real-time feedback. A crucial aspect of AR-robot integration is accurate spatial registration to align virtual content with the physical robotic workspace. This paper systematically investigates the effects of different tracking techniques and registration parameters on AR-to-robot registration accuracy, focusing on paired-point methods. We evaluate four marker detection algorithms - ARToolkit, Vuforia, ArUco, and retroreflective tracking - analyzing the influence of viewing distance, angle, marker size, point distance, distribution, and quantity. Our results show that ARToolkit provides the highest registration accuracy. While larger markers and positioning registration point centroids close to target locations consistently improved accuracy, other factors such as point distance and quantity were highly dependent on the tracking techniques used. Additionally, we propose an effective refinement method using point cloud registration, significantly improving accuracy by integrating data from points recorded between registration locations. These findings offer practical guidelines for enhancing AR-robot registration, with future work needed to assess the transferability to other AR devices and robots.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
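Because the paper above centers on paired-point AR-to-robot registration, a minimal sketch of that class of method may help place the evaluated parameters (point quantity, distance, distribution): a standard SVD-based (Kabsch) rigid alignment of corresponding marker points, followed by the fiducial registration error as a simple accuracy measure. This is an illustrative sketch under those assumptions, not the authors' code; the proposed refinement using point cloud registration of intermediate points is not covered here.

import numpy as np

def paired_point_registration(ar_points, robot_points):
    # Returns rotation R and translation t mapping ar_points onto robot_points,
    # given corresponding 3D points (N x 3 arrays) in AR and robot coordinates.
    ar_centroid = ar_points.mean(axis=0)
    robot_centroid = robot_points.mean(axis=0)
    # Cross-covariance of the centered point sets
    H = (ar_points - ar_centroid).T @ (robot_points - robot_centroid)
    U, _, Vt = np.linalg.svd(H)
    R = Vt.T @ U.T
    if np.linalg.det(R) < 0:           # guard against reflections
        Vt[-1, :] *= -1
        R = Vt.T @ U.T
    t = robot_centroid - R @ ar_centroid
    return R, t

def fiducial_registration_error(R, t, ar_points, robot_points):
    # Mean distance between transformed AR points and their robot counterparts.
    residuals = (R @ ar_points.T).T + t - robot_points
    return np.sqrt((residuals ** 2).sum(axis=1)).mean()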

Joeres, F; Paetz, T; Hansen, C; Schenk, A
The necessity of parallel needle placement for tumor ablation using irreversible electroporation: a myth? Proceedings Article
In: CARS 2025—Computer Assisted Radiology and Surgery: Proceedings of the 39th International Congress and Exhibition, Berlin, Germany, June 17–20, 2025, pp. 90–91, 2025.
@inproceedings{Joeres.2025,
title = {The necessity of parallel needle placement for tumor ablation using irreversible electroporation: a myth?},
author = {F Joeres and T Paetz and C Hansen and A Schenk},
url = {https://link.springer.com/article/10.1007/s11548-025-03373-6},
doi = {10.1007/s11548-025-03373-6},
year = {2025},
date = {2025-05-29},
urldate = {2025-05-29},
booktitle = {CARS 2025—Computer Assisted Radiology and Surgery: Proceedings of the 39th International Congress and Exhibition, Berlin, Germany, June 17–20, 2025},
pages = {90–91},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Hanke, L; Holst, U; Huettl, F; Joeres, F; Huber, T; Hansen, C
Toward structured abdominal examination training using augmented reality Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 5, pp. 949–958, 2025, ISSN: 1861-6429.
@article{schwenderling_toward_2025,
title = {Toward structured abdominal examination training using augmented reality},
author = {L Schwenderling and L Hanke and U Holst and F Huettl and F Joeres and T Huber and C Hansen},
url = {https://doi.org/10.1007/s11548-024-03311-y},
doi = {10.1007/s11548-024-03311-y},
issn = {1861-6429},
year = {2025},
date = {2025-05-01},
urldate = {2025-05-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {20},
number = {5},
pages = {949–958},
abstract = {Structured abdominal examination is an essential part of the medical curriculum and surgical training, requiring a blend of theory and practice from trainees. Current training methods, however, often do not provide adequate engagement, fail to address individual learning needs or do not cover rare diseases.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T; Allgaier, M; Schott, D; Hansen, C; Heinrich, F
Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{mielke_virtual_2025,
title = {Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction},
author = {T Mielke and M Allgaier and D Schott and C Hansen and F Heinrich},
url = {https://dl.acm.org/doi/10.1145/3706599.3719724},
doi = {10.1145/3706599.3719724},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {ACM},
address = {Yokohama Japan},
abstract = {Extended Reality (XR) shows potential for human-centered evaluation of real-world scenarios and could improve efficiency and safety in robotic research. However, the validity of XR Human-Robot Interaction (HRI) studies remains underexplored. This paper investigates the transferability of HRI studies across virtualization levels for three tasks. Our results indicate XR study validity is task-specific, with task virtualization as a key influencing factor. Partially virtualized settings with virtual tasks and a real robot, as well as fully virtualized setups with a simulated robot, yielded results comparable to real setups for pick-and-place and robotic ultrasound. However, for precision-dependent peg-in-hole, differences were observed between real and virtualized conditions regarding completion time, perceived workload, and ease. Demonstrating the task dependency of XR transferability and comparing virtualization levels, our work takes an important step in assessing XR study validity. Future work should isolate factors affecting transferability and assess relative validity in the absence of absolute validity.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Schotte, M; Joeres, F; Heinrich, F; Hanke, L; Huettl, F; Huber, T; Hansen, C
Teach Me Where to Look: Dual-task Attention Training in Augmented Reality Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{schwenderling_teach_2025,
title = {Teach Me Where to Look: Dual-task Attention Training in Augmented Reality},
author = {L Schwenderling and M Schotte and F Joeres and F Heinrich and L Hanke and F Huettl and T Huber and C Hansen},
url = {https://dl.acm.org/doi/10.1145/3706599.3720198},
doi = {10.1145/3706599.3720198},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {ACM},
address = {Yokohama Japan},
abstract = {Regular eye contact is essential in medicine to recognize signs of pain. However, it is difficult to remember this during training as attention is tied up in learning. While augmented reality (AR) has shown promising results for medical education, there is no training for attention allocation yet. Therefore, three auditory and three visual attention guidance tools in AR are evaluated for their use in medical dual-task training settings. In expert reviews with six participants in human-computer interaction and medical didactics, advantages, disadvantages, and refinements for the cues were developed. For visual cues, an overt but less occluding cue was preferred for constant visibility of the primary task. A more diegetic cue design was proposed for the auditory cues to use a patient simulation as a reminder of the regular face glance. In general, several cues were found to be suitable for gaze guidance training, requiring only minor changes for improvement.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Heinrich, F; Schott, D; Schwenderling, L; Hansen, C
Do You See What I See? Evaluating Relative Depth Judgments Between Real and Virtual Projections Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{heinrich_you_2025,
title = {Do You See What I See? Evaluating Relative Depth Judgments Between Real and Virtual Projections},
author = {F Heinrich and D Schott and L Schwenderling and C Hansen},
url = {https://doi.org/10.1145/3706599.3720157},
doi = {10.1145/3706599.3720157},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '25},
abstract = {Projector-based augmented reality (AR) is promising in different domains with less issues in discomfort or shortage of space. However, due to limitations like high costs and cumbersome calibration, this AR modality remains underused. To address this problem, a stereoscopic projector-based AR simulation was implemented for a cost-effective video see-through AR headset. To evaluate the validity of this simulation, a relative depth judgment experiment was conducted to compare this method with a physical projection system. Consistent results suggest that a known interaction effect between visualization and disparity mode could be successfully reproduced using both the physical projection and the virtual simulation. In addition, first findings indicate that there are no significant differences between these projection modalities. The results indicate that other perception-related effects observed for projector-based AR may also be applicable to virtual projection simulations and that future findings determined using only these simulations may also be applicable to real projections.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Kunz, M; Schott, D; Wunderling, T; Halloul, M; Hansen, C; Albrecht, A; Braun-Dullaeus, R
Embryonic heart development as an immersive experience: Unveiling learning effects and influential factors in virtual learning environments Journal Article
In: Comput. Biol. Med., vol. 187, no. C, 2025, ISSN: 0010-4825.
@article{kunz_embryonic_2025,
title = {Embryonic heart development as an immersive experience: Unveiling learning effects and influential factors in virtual learning environments},
author = {M Kunz and D Schott and T Wunderling and M Halloul and C Hansen and A Albrecht and R Braun-Dullaeus},
url = {https://doi.org/10.1016/j.compbiomed.2024.109638},
doi = {10.1016/j.compbiomed.2024.109638},
issn = {0010-4825},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
journal = {Comput. Biol. Med.},
volume = {187},
number = {C},
abstract = {As the quality and availability of Virtual Reality (VR) technologies improve, their potential applications in medical education, particularly VR Learning Environments (VRLEs), are increasingly explored. VRLEs offer a dynamic platform where educators and students can interact, access materials, and engage in learning beyond traditional classrooms. However, questions remain about their long-term learning effects and potential confounding factors. This study investigates these aspects through a VR application designed for teaching heart embryology. For this reason we conducted a user study with medical students in their early years of training (N = 143). Our findings reveal significant short-term and sustained learning outcomes two to four weeks following a single VR session. Importantly, these outcomes appear largely independent of users’ technical affinity and are minimally influenced by their immersion tendencies. Instead, the quality of the VRLE and its user experience emerge as critical factors. These results underscore the efficacy of well-designed VRLEs in higher education and highlight key areas for future development.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schreiter, J; Heinrich, F; Hatscher, B; Schott, D; Hansen, C
Multimodal human–computer interaction in interventional radiology and surgery: a systematic literature review Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 4, pp. 807–816, 2025, ISSN: 1861-6429.
@article{schreiter_multimodal_2025,
title = {Multimodal human–computer interaction in interventional radiology and surgery: a systematic literature review},
author = {J Schreiter and F Heinrich and B Hatscher and D Schott and C Hansen},
url = {https://doi.org/10.1007/s11548-024-03263-3},
doi = {10.1007/s11548-024-03263-3},
issn = {1861-6429},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {20},
number = {4},
pages = {807–816},
abstract = {As technology advances, more research dedicated to medical interactive systems emphasizes the integration of touchless and multimodal interaction (MMI). Particularly in surgical and interventional settings, this approach is advantageous because it maintains sterility and promotes a natural interaction. Past reviews have focused on investigating MMI in terms of technology and interaction with robots. However, none has put particular emphasis on analyzing these kind of interactions for surgical and interventional scenarios.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T; Heinrich, F; Hansen, C
SensARy Substitution: Augmented Reality Techniques to Enhance Force Perception in Touchless Robot Control Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 31, no. 5, pp. 3235–3244, 2025, ISSN: 1941-0506.
@article{mielke_sensary_2025,
title = {SensARy Substitution: Augmented Reality Techniques to Enhance Force Perception in Touchless Robot Control},
author = {T Mielke and F Heinrich and C Hansen},
url = {https://ieeexplore.ieee.org/document/10926846},
doi = {10.1109/TVCG.2025.3549856},
issn = {1941-0506},
year = {2025},
date = {2025-03-14},
urldate = {2025-05-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {31},
number = {5},
pages = {3235–3244},
abstract = {The lack of haptic feedback in touchless human-robot interaction is critical in applications such as robotic ultrasound, where force perception is crucial to ensure image quality. Augmented reality (AR) is a promising tool to address this limitation by providing sensory substitution through visual or vibrotactile feedback. The implementation of visual force feedback requires consideration not only of feedback design but also of positioning. Therefore, we implemented two different visualization types at three different positions and investigated the effects of vibrotactile feedback on these approaches. Furthermore, we examined the effects of multimodal feedback compared to visual or vibrotactile output alone. Our results indicate that sensory substitution eases the interaction in contrast to a feedback-less baseline condition, with the presence of visual support reducing average force errors and being subjectively preferred by the participants. However, the more feedback was provided, the longer users needed to complete their tasks. Regarding visualization design, a 2D bar visualization reduced force errors compared to a 3D arrow concept. Additionally, the visualizations being displayed directly on the ultrasound screen were subjectively preferred. With findings regarding feedback modality and visualization design our work represents an important step toward sensory substitution for touchless human-robot interaction.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Herbrich, W; Zittlau, P; Joeres, F; Hansen, C
Prototype development of a cross-reality digital twin ecosystem: the web, open source and open data Proceedings Article
In: 2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 459–462, 2025.
@inproceedings{herbrich_prototype_2025,
title = {Prototype development of a cross-reality digital twin ecosystem: the web, open source and open data},
author = {W Herbrich and P Zittlau and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/10972895},
doi = {10.1109/VRW66409.2025.00100},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
booktitle = {2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {459–462},
abstract = {This work contributes to a broader initiative aimed at transforming a former industrial port area into a dynamic Knowledge Transfer Space (KTS). To support this transformation, we explore the development of a cross-reality (CR) digital twin of the port area, which integrates user interfaces with varying degrees of virtuality. We evaluate different web technologies, focusing on the balance between accessibility, immersion, scalability, and performance. By comparing client-side rendering with pixel streaming approaches, we aim to identify suitable solutions for prototyping a CR digital twin ecosystem. The development of a prototype is ongoing, based on a client-side rendering approach. The outcomes contribute to developing an open and transferable CR digital twin applicable to similar urban projects in other cities.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Allgaier, M; Dangszat, E; Huettl, F; Hanke, L; Huber, T; Preim, B; Hansen, C
Impact of Input and Output Devices on a Virtual Ultrasound Training Proceedings Article
In: 2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 937–941, IEEE, Saint Malo, France, 2025, ISBN: 979-8-3315-1484-6.
@inproceedings{allgaier_impact_2025,
title = {Impact of Input and Output Devices on a Virtual Ultrasound Training},
author = {M Allgaier and E Dangszat and F Huettl and L Hanke and T Huber and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/10972939/},
doi = {10.1109/VRW66409.2025.00191},
isbn = {979-8-3315-1484-6},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
booktitle = {2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {937–941},
publisher = {IEEE},
address = {Saint Malo, France},
abstract = {Performing ultrasound requires mental skills that have to be trained hands on. Virtual simulations can be employed to provide novice surgeons with a safe training environment prior to performing ultrasound on the patient. For both input and output there is a wide range of devices that are used in existing ultrasound simulations. Because the devices have their limitations and benefits regarding realism, costs, and access, we compared three technical setups: desktop with mouse interaction, desktop with a haptic device, and virtual reality with a haptic device. In a user study with 19 participants we investigated the usability and assessed qualitative user feedback in a semi-structured interview. Significant differences regarding usability and training time were found between both versions with the haptic device and the desktop with mouse version. Based on the feedback, using a haptic device seems to be more relevant than the two output devices in this training case.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Razavizadeh, S; Kofler, M; Kunz, M; Kempfert, J; Braun-Dullaeus, R; Weidling, J; Preim, B; Hansen, C
A virtual patient authoring tool for transcatheter aortic valve replacement Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 2, pp. 379–389, 2025, ISSN: 1861-6429.
@article{razavizadeh_virtual_2025,
title = {A virtual patient authoring tool for transcatheter aortic valve replacement},
author = {S Razavizadeh and M Kofler and M Kunz and J Kempfert and R Braun-Dullaeus and J Weidling and B Preim and C Hansen},
url = {https://doi.org/10.1007/s11548-024-03293-x},
doi = {10.1007/s11548-024-03293-x},
issn = {1861-6429},
year = {2025},
date = {2025-02-01},
urldate = {2025-02-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {20},
number = {2},
pages = {379–389},
abstract = {Computer-based medical training scenarios, derived from patient’s records, often lack variability, modifiability, and availability. Furthermore, generating image datasets and creating scenarios is resource-intensive. Therefore, patient authoring tools for rapid dataset-independent creation of virtual patients (VPs) is a pressing need.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Kunz, M; Albrecht, A; Braun-Dullaeus, R; Hansen, C
Too Heart to Handle? Exploring Self-Directed And Collaborative Virtual Learning Environments in Anatomy Education. Journal Article
In: EuroVis 2025 - Dirk Bartz Prize, 2025, (Artwork Size: 5 pages, ISBN: 9783038682813, Publisher: The Eurographics Association).
@article{schott_too_2025,
title = {Too Heart to Handle? Exploring Self-Directed And Collaborative Virtual Learning Environments in Anatomy Education.},
author = {D Schott and M Kunz and A Albrecht and R Braun-Dullaeus and C Hansen},
url = {https://diglib.eg.org/handle/10.2312/evm20251973},
doi = {10.2312/EVM.20251973},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {EuroVis 2025 - Dirk Bartz Prize},
abstract = {The integration of Extended Reality (XR) into medical education represents a transformative shift, particularly in anatomy training, where immersive simulations enhance cognitive engagement and knowledge retention. The developing heart is characterized by rapid morphological changes within a short time frame, which poses a significant pedagogical challenge. Conventional 2D imaging and static models often fail to convey these processes, limiting learners' ability to conceptualize critical spatial relationships, a barrier in understanding congenital anomalies. To address these limitations, this work leverages XR-driven visualization and interaction paradigms to create virtual learning environments. Based on this, we propose methods for designing XR educational modules that adapt to both collaborative and self-directed learning contexts, using embryonic cardiogenesis as an illustrating case study. We present findings from mixed-methods user studies involving a total of 264 students, along with feedback from lecturers, highlighting the importance of an iterative, user-centered design approach.},
note = {Artwork Size: 5 pages, ISBN: 9783038682813, Publisher: The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Urrutia, R; Espejo, D; Guerra, M; Vio, K; Sühn, T; Esmaeili, N; Boese, A; Fuentealba, P; Illanes, A; Hansen, C; Poblete, V
Exploring Deep Clustering Methods in Vibro-Acoustic Sensing for Enhancing Biological Tissue Characterization Journal Article
In: IEEE Access, vol. 13, pp. 80395–80406, 2025, ISSN: 2169-3536.
@article{urrutia_exploring_2025,
title = {Exploring Deep Clustering Methods in Vibro-Acoustic Sensing for Enhancing Biological Tissue Characterization},
author = {R Urrutia and D Espejo and M Guerra and K Vio and T Sühn and N Esmaeili and A Boese and P Fuentealba and A Illanes and C Hansen and V Poblete},
url = {https://ieeexplore.ieee.org/document/10981752/},
doi = {10.1109/ACCESS.2025.3566280},
issn = {2169-3536},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
journal = {IEEE Access},
volume = {13},
pages = {80395–80406},
abstract = {Nonlinear dimensionality reduction techniques, often referred to as manifold learning, are increasingly valuable for data visualization and unsupervised clustering. In the context of surgery and medicine, these methods facilitate the analysis of complex datasets, enabling pattern recognition in surgical data. This study explores the characterization of six tissue types through manifold learning and unsupervised clustering, utilizing vibro-acoustic (VA) signals collected from manual palpation experiments. A wireless sensor mounted at the tip of a surgical instrument was used to acquire 1,680 VA signals, which were processed using Fourier transform and cepstral analysis for feature extraction. We assessed the performance of two dimensionality reduction techniques: uniform manifold approximation and projection (UMAP) and variational autoencoder (VAE). Results indicate that cepstral features combined with UMAP yield superior clustering performance compared to VAE, achieving higher classification accuracy (92% vs. 87%) and better-defined clusters with greater compactness. The observed differences in performance are linked to the intrinsic properties of the tissues, particularly surface characteristics such as friction and moisture, which affect signal consistency. Additionally, we compared our approach with previous works, including a study utilizing the same dataset, where our methodology demonstrated improved accuracy. Future research will focus on refining the VAE model, increasing the diversity of tissue samples, and validating the proposed approach in real surgical settings to enhance its applicability in minimally invasive surgery.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
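As an orientation for the pipeline described in the abstract above (cepstral features, manifold learning, unsupervised clustering), here is a minimal sketch assuming numpy, umap-learn, and scikit-learn. The signal array and its length, the number of cepstral coefficients, and the use of k-means with six clusters (matching the six tissue types) are placeholder assumptions; this is not the authors' implementation.

import numpy as np
import umap
from sklearn.cluster import KMeans

def real_cepstrum(signal, n_coeffs=40):
    # Real cepstrum: inverse FFT of the log magnitude spectrum; keep low quefrencies.
    spectrum = np.abs(np.fft.rfft(signal)) + 1e-12
    cepstrum = np.fft.irfft(np.log(spectrum))
    return cepstrum[:n_coeffs]

# Hypothetical vibro-acoustic recordings: 1,680 signals of placeholder length 4096
signals = np.random.randn(1680, 4096)
features = np.vstack([real_cepstrum(s) for s in signals])

# Non-linear dimensionality reduction followed by unsupervised clustering
embedding = umap.UMAP(n_components=2, random_state=42).fit_transform(features)
labels = KMeans(n_clusters=6, n_init=10, random_state=42).fit_predict(embedding)
print(labels[:10])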

Bashkanov, O; Engelage, L; Behnel, N; Ehrlich, P; Hansen, C; Rak, M
Multimodal Data Fusion with Irregular PSA Kinetics for Automated Prostate Cancer Grading Journal Article
In: 2025.
@article{bashkanov_multimodal_2025,
title = {Multimodal Data Fusion with Irregular PSA Kinetics for Automated Prostate Cancer Grading},
author = {O Bashkanov and L Engelage and N Behnel and P Ehrlich and C Hansen and M Rak},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Urrutia, R; Ayman, F; Boese, A; Hansen, C; Illanes, A
Needle Puncture Detection Using Vibroacoustic Sensing in Layered Phantoms Journal Article
In: 2025.
@article{urrutia_needle_2025,
title = {Needle Puncture Detection Using Vibroacoustic Sensing in Layered Phantoms},
author = {R Urrutia and F Ayman and A Boese and C Hansen and A Illanes},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Hanke, L; Schwoerer, P; Huettl, F; Vradelis, L; Strelow, K; Boedecker, C; Saalfeld, P; Chheang, V; Buggenhagen, H; Lang, H; Hansen, C; Huber, T
Use of an Immersive Virtual Reality Application to Educate Medical Students in Patient Handover Journal Article
In: 2025.
@article{hanke_use_2025,
title = {Use of an Immersive Virtual Reality Application to Educate Medical Students in Patient Handover},
author = {L Hanke and P Schwoerer and F Huettl and L Vradelis and K Strelow and C Boedecker and P Saalfeld and V Chheang and H Buggenhagen and H Lang and C Hansen and T Huber},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schreiter, J; Mielke, T; Georgiades, M; Pech, M; Hansen, C; Heinrich, F
Exploring Interaction Concepts for the Manipulation of a Collaborative Robot: A Comparative Study Proceedings Article
In: Proceedings of the 2025 ACM/IEEE International Conference on Human-Robot Interaction, pp. 55–64, IEEE Press, Melbourne, Australia, 2025.
@inproceedings{schreiter_exploring_2025,
title = {Exploring Interaction Concepts for the Manipulation of a Collaborative Robot: A Comparative Study},
author = {J Schreiter and T Mielke and M Georgiades and M Pech and C Hansen and F Heinrich},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {Proceedings of the 2025 ACM/IEEE International Conference on Human-Robot Interaction},
pages = {55–64},
publisher = {IEEE Press},
address = {Melbourne, Australia},
series = {HRI '25},
abstract = {Robotic systems have the potential to enhance a wide range of domains, such as medical workflows, by automating individual steps of complex processes. However, human-robot interaction (HRI) is of critical importance, as effective collaboration between humans and robots is essential even in highly automated environments. Recent research has predominantly focused on the development of interaction methods rather than systematically comparing existing approaches. Therefore, we conducted a user study (n=20) to compare different HRI concepts for end effector manipulation combined with clutching mechanisms for manipulation activation in an alignment task using the example of robotic ultrasound (US). Manipulation methods included hand-guiding, teleoperation, and touchless interaction, while clutching mechanisms were realized through hand, voice, and foot interaction. The results indicate advantages of hand-guiding for manipulation. While no significant differences were observed between clutching mechanisms, strong evidence suggests comparable performance across these modalities. Notably, significant interaction effects on perceived workload reveal that the optimal clutching mechanism depends on the selected manipulation technique. This work underscores the critical importance of selecting appropriate HRI concepts and understanding the dependencies of manipulation techniques with clutching mechanisms. While our study included the usage of a robotic US, the insights gained are broadly transferable across various domains involving robotic manipulation tasks in human-robot collaboration.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Bashkanov, O; Rak, M; Engelage, L; Hansen, C
Augmenting Prostate MRI Dataset with Synthetic Volumetric Images from Zone-Conditioned Diffusion Generative Model Proceedings Article
In: Mukhopadhyay, A; Oksuz, Ilkay; Engelhardt, Sandy; Mehrof, Dorit; Yuan, Yixuan (Ed.): Deep Generative Models, pp. 160–168, Springer Nature Switzerland, Cham, 2025, ISBN: 978-3-031-72744-3.
@inproceedings{bashkanov_augmenting_2025,
title = {Augmenting Prostate MRI Dataset with Synthetic Volumetric Images from Zone-Conditioned Diffusion Generative Model},
author = {O Bashkanov and M Rak and L Engelage and C Hansen},
editor = {A Mukhopadhyay and Ilkay Oksuz and Sandy Engelhardt and Dorit Mehrof and Yixuan Yuan},
doi = {10.1007/978-3-031-72744-3_16},
isbn = {978-3-031-72744-3},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {Deep Generative Models},
pages = {160–168},
publisher = {Springer Nature Switzerland},
address = {Cham},
abstract = {The need for artificial intelligence (AI)-driven computer-assisted diagnosis (CAD) tools drives up the demand for large high-quality datasets in medical imaging. However, collecting the necessary amount of data is often impractical due to patient privacy concerns or restricted time for medical annotation. Recent advances in generative models in medical imaging with a focus on diffusion-based techniques could provide realistic-looking synthetic samples as a supplement for real data. In this work, we study whether synthetic volumetric MRIs generated by the diffusion model can be used to train downstream models, e.g., semantic segmentation. We can create an arbitrarily large dataset with ground truth by conditioning the diffusion model with a segmentation mask. Thus, the additional synthetic data can be used to control the dataset diversity. Experiments revealed that downstream tasks profit from additional synthetic data. However, the effect will eventually diminish when sufficient real samples are available. We showcase the strength of the synthetic data and provide practical recommendations for using the generated data in zonal prostate segmentation.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Batz, V; Liedtke, V B; Lameski, P; Trajkovik, V; Hußlein, S; Hansen, C; Herzog, M A
Enhancing Public Awareness of Air Quality: Evaluating Communication Strategies and Design Prototypes Using a Design-Based Implementation Research Approach Proceedings Article
In: International Conference on Human-Computer Interaction, pp. 3–25, Springer, 2025.
@inproceedings{batz2025enhancing,
title = {Enhancing Public Awareness of Air Quality: Evaluating Communication Strategies and Design Prototypes Using a Design-Based Implementation Research Approach},
author = {V Batz and V B Liedtke and P Lameski and V Trajkovik and S Hußlein and C Hansen and M A Herzog},
url = {https://link.springer.com/chapter/10.1007/978-3-031-93221-2_1},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {International Conference on Human-Computer Interaction},
pages = {3–25},
organization = {Springer},
abstract = {Despite the health risks associated with air pollution and the availability of data, public awareness of air quality remains limited. This paper examines communication strategies to enhance awareness in Germany and North Macedonia. Using the Design-Based Implementation Research approach, literature reviews, stakeholder interviews and surveys were conducted with 307 participants to identify knowledge gaps and effective methods.
Three citizen workshops led to the development and evaluation of five design prototypes, including an interactive installation (“Pollution Booth”), a VR application (“Visible Particulate Matter”), and a gamified learning experience (“End Game”). These prototypes addressed information accessibility, data representation, and environmentally conscious behavior. The results show that region-specific communication, interactive designs, and passive information methods in public spaces significantly improve the perception and understanding of air pollution data. Gamified approaches, such as the “End Game” prototype, were particularly effective in educating children about air pollution.
Thirty-two requirements for effective educational strategies were identified, including simplified data visualization, reduced content, and personalized information. The findings highlight the importance of local and personalized approaches, as well as methods for collective knowledge sharing, in promoting awareness and environmentally friendly behavior. Future work should further explore the long-term engagement and sustainable impact of such communication strategies.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2024

Joeres, F; Zittlau, P; Herbrich, W; Heinrich, F; Rose, G; Hansen, C
Concept development of a cross-reality ecosystem for urban knowledge transfer spaces Proceedings Article
In: 2024 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 166–169, 2024, (ISSN: 2771-1110).
@inproceedings{joeres_concept_2024,
title = {Concept development of a cross-reality ecosystem for urban knowledge transfer spaces},
author = {F Joeres and P Zittlau and W Herbrich and F Heinrich and G Rose and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/10765174},
doi = {10.1109/ISMAR-Adjunct64951.2024.00043},
year = {2024},
date = {2024-10-01},
urldate = {2024-10-01},
booktitle = {2024 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {166–169},
abstract = {This paper presents the development of a cross-reality (CR) ecosystem designed for an urban knowledge transfer space (KTS) in a post-industrial urban environment. The project is part of a larger initiative aimed at transforming a former industrial river port into a dynamic KTS, facilitating interactions between scientific, commercial, residential, and cultural stakeholders. Our research explores the potential of multimodal mixed reality (XR) technologies to enhance engagement with the content and stakeholders of the KTS. Through a three-phase process, we identified key stakeholders and their target audiences, selected appropriate XR technologies, and developed initial use cases that integrate web applications, mobile augmented reality (AR), and XR head-mounted displays. The preliminary findings indicate that these technologies can effectively cater to diverse user groups, providing different levels of virtuality and interaction. However, challenges remain, particularly in stakeholder engagement and the evolving nature of the KTS initiative. Ongoing work includes the development of a Web-XR-based prototype, which will be iteratively refined to better meet user needs and adapt to future technological advancements. This research contributes to the understanding of how CR technologies can be employed in urban transformation processes, offering insights into the design of flexible and scalable CR ecosystems.},
note = {ISSN: 2771-1110},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Hanke, L; Vradelis, L; Boedecker, C; Griesinger, J; Demare, T; Lindemann, N; Huettl, F; Chheang, V; Saalfeld, P; Wachter, N; Wollstädter, J; Spranz, M; Lang, H; Hansen, C; Huber, T
Immersive virtual reality for interdisciplinary trauma management – initial evaluation of a training tool prototype Journal Article
In: BMC Medical Education, vol. 24, no. 1, pp. 769, 2024, ISSN: 1472-6920.
@article{hanke_immersive_2024,
title = {Immersive virtual reality for interdisciplinary trauma management – initial evaluation of a training tool prototype},
author = {L Hanke and L Vradelis and C Boedecker and J Griesinger and T Demare and N Lindemann and F Huettl and V Chheang and P Saalfeld and N Wachter and J Wollstädter and M Spranz and H Lang and C Hansen and T Huber},
url = {https://doi.org/10.1186/s12909-024-05764-w},
doi = {10.1186/s12909-024-05764-w},
issn = {1472-6920},
year = {2024},
date = {2024-07-01},
urldate = {2024-07-01},
journal = {BMC Medical Education},
volume = {24},
number = {1},
pages = {769},
abstract = {Emergency care of critically ill patients in the trauma room is an integral part of interdisciplinary work in hospitals. Life-threatening injuries require swift diagnosis, prioritization, and treatment; thus, different medical specialties need to work together closely for optimal patient care. Training is essential to facilitate smooth performance. This study presents a training tool for familiarization with trauma room algorithms in immersive virtual reality (VR), and a first qualitative assessment.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Chheang, V; Schott, D; Saalfeld, P; Vradelis, L; Huber, T; Huettl, F; Lang, H; Preim, B; Hansen, C
Advanced liver surgery training in collaborative VR environments Journal Article
In: Computers & Graphics, vol. 119, pp. 103879, 2024, ISSN: 0097-8493.
@article{chheang_advanced_2024,
title = {Advanced liver surgery training in collaborative VR environments},
author = {V Chheang and D Schott and P Saalfeld and L Vradelis and T Huber and F Huettl and H Lang and B Preim and C Hansen},
url = {https://www.sciencedirect.com/science/article/pii/S0097849324000050},
doi = {10.1016/j.cag.2024.01.006},
issn = {0097-8493},
year = {2024},
date = {2024-04-01},
urldate = {2024-04-01},
journal = {Computers & Graphics},
volume = {119},
pages = {103879},
abstract = {Virtual surgical training systems are crucial for enabling mental preparation, supporting decision-making, and improving surgical skills. Many virtual surgical training environments focus only on training for a specific medical skill and take place in a single virtual room. However, surgical education and training include the planning of procedures as well as interventions in the operating room context. Moreover, collaboration among surgeons and other medical professionals is only applicable to a limited extent. This work presents a collaborative VR environment similar to a virtual teaching hospital to support surgical training and interprofessional collaboration in a co-located or remote environment. The environment supports photo-realistic avatars and scenarios ranging from planning to training procedures in the virtual operating room. It includes a lobby, a virtual surgical planning room with four surgical planning stations, laparoscopic liver surgery training with the integration of laparoscopic surgical instruments, and medical training scenarios for interprofessional team training in a virtual operating room. Each component was evaluated by domain experts as well as in a series of user studies, providing insights on usability, usefulness, and potential research directions. The proposed environment may serve as a foundation for future medical training simulators.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Kunz, M; Mandel, J; Schwenderling, L; Braun-Dullaeus, R; Hansen, C
An AR-Based Multi-User Learning Environment for Anatomy Seminars Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 949–950, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schott_ar-based_2024,
title = {An AR-Based Multi-User Learning Environment for Anatomy Seminars},
author = {D Schott and M Kunz and J Mandel and L Schwenderling and R Braun-Dullaeus and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536568/},
doi = {10.1109/VRW62533.2024.00271},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {949–950},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Understanding the intricate and rapid changes in shape during embryonic formation is vital for medical students. Using the example of embryonic human heart development, we introduce an AR-based multi-user approach to enhance understanding and foster a participatory learning environment. Through a user-centered approach, we created a prototype accommodating two player roles and enabling multi-modal inputs to encourage dynamic group discussions. We invited four anatomy experts to evaluate three system configurations in an interdisciplinary workshop to assess the feasibility of integration into anatomy seminars. The gathered data and feedback indicate the potential of our collaborative concept for integration into the medical curriculum.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Herbrich, W; Joeres, F; Hansen, C
A Novel Framework for Hand Visualization in Web-Based Collaborative XR Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 18–23, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schwenderling_novel_2024,
title = {A Novel Framework for Hand Visualization in Web-Based Collaborative XR},
author = {L Schwenderling and W Herbrich and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536317/},
doi = {10.1109/VRW62533.2024.00010},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {18–23},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Many extended reality (XR) applications are platform-specific, making accessibility and cross-platform collaboration difficult. Web-based collaborative XR can enhance adoption of XR technologies, using the browser as a platform-independent interface. However, challenges arise from the browser environment, such as performance limitations. To this end, we present a WebXR-based framework for hand interaction in cross-platform collaboration in XR. A network structure and methods for collaborative and individual object manipulation complement the integrated hand tracking. Three different fidelity levels to represent the hands of remote users were implemented to accommodate different performance capabilities. Concepts ranged from virtual hands to discrete poses with abstract objects. A sample application was implemented with a puzzle task. Two users collaborated in the browsers of the Microsoft HoloLens 2 and the Meta Quest 2. Qualitative and quantitative data on user performance (n=9), and frame rate recordings (n=1) were collected. All users were able to solve the puzzle together quickly and intuitively. The Quest environment was preferred, as there were more performance issues with the HoloLens. Hand interaction was well-received and proved to be sufficient as the only form of communication. Simpler representations of the hands lead to a higher frame rate, whereby the effects were device-dependent. The impact on task performance was low. Hand interaction enables an intuitive exchange of objects and basic communication in cross-platform collaboration via browsers. Depending on the XR environment, however, device-specific performance limitations must be taken into account by modulating the amount of data and rendering effort.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Kunz, M; Heinrich, F; Mandel, J; Albrecht, A; Braun-Dullaeus, R; Hansen, C
Stand Alone or Stay Together: An In-situ Experiment of Mixed-Reality Applications in Embryonic Anatomy Education Proceedings Article
In: Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology, pp. 1–11, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0535-9.
@inproceedings{schott_stand_2024,
title = {Stand Alone or Stay Together: An In-situ Experiment of Mixed-Reality Applications in Embryonic Anatomy Education},
author = {D Schott and M Kunz and F Heinrich and J Mandel and A Albrecht and R Braun-Dullaeus and C Hansen},
url = {https://dl.acm.org/doi/10.1145/3641825.3687706},
doi = {10.1145/3641825.3687706},
isbn = {979-8-4007-0535-9},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Proceedings of the 30th ACM Symposium on Virtual Reality Software and Technology},
pages = {1–11},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {VRST '24},
abstract = {Where traditional media and methods reach their limits in anatomy education, mixed-reality (MR) environments can provide effective learning support because of their high interactivity and spatial visualization capabilities. However, the underlying design and pedagogical requirements are as diverse as the technologies themselves. This paper examines the effectiveness of individual- and collaborative learning environments for anatomy education, using embryonic heart development as an example. Both applications deliver the same content using identical visualizations and hardware but differ in interactivity and pedagogical approach. The environments were evaluated in a user study with medical students (n = 90) during their examination phase, assessing usability, user experience, social interaction/co-presence, cognitive load, and personal preference. Additionally, we conducted a knowledge test before and after an MR learning session to determine educational effects compared to a conventional anatomy seminar. Results indicate that the individual learning environment was generally preferred. However, no significant difference in learning effectiveness could be shown between the conventional approach and the MR applications. This suggests that both can effectively complement traditional seminars despite their different natures. Our study contributes to understanding how different MR settings could be tailored for anatomical education.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Heinrich, F; Kunz, M; Mandel, J; Albrecht, A; Braun-Dullaeus, R; Hansen, C
CardioCoLab: Collaborative Learning of Embryonic Heart Anatomy in Mixed Reality Journal Article
In: Eurographics Workshop on Visual Computing for Biology and Medicine, 2024, (Artwork Size: 5 pages, Edition: 1191, ISBN: 9783038682448, Publisher: The Eurographics Association).
@article{schott_cardiocolab_2024,
title = {CardioCoLab: Collaborative Learning of Embryonic Heart Anatomy in Mixed Reality},
author = {D Schott and F Heinrich and M Kunz and J Mandel and A Albrecht and R Braun-Dullaeus and C Hansen},
url = {https://diglib.eg.org/handle/10.2312/vcbm20241191},
doi = {10.2312/VCBM.20241191},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Eurographics Workshop on Visual Computing for Biology and Medicine},
abstract = {The complexity of embryonic heart development presents significant challenges for medical education, particularly in illustrating dynamic morphological changes over short time periods. Traditional teaching methods, such as 2D textbook illustrations and static models, are often insufficient for conveying these intricate processes. To address this gap, we developed a multi-user Mixed Reality (MR) system designed to enhance collaborative learning and interaction with virtual heart models. Building on previous research, we identified the needs of both students and teachers, implementing various interaction and visualization features iteratively. An evaluation with teachers and students (N = 12) demonstrated the system's effectiveness in improving engagement and understanding of embryonic heart development. The study highlights the potential of MR in medical seminar settings as a valuable addition to medical education by enhancing traditional learning methods.},
note = {Artwork Size: 5 pages, Edition: 1191, ISBN: 9783038682448, Publisher: The Eurographics Association},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Wagnerberger, D; Schott, D; Schwenderling, L; Hansen, C; Schumacher, D
Empowering Patients: Improve Gender-Sensitive Medical Knowledge Through Interactive Edutainment Proceedings Article
In: Proceedings of the 13th Nordic Conference on Human-Computer Interaction, pp. 1–12, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0966-1.
@inproceedings{wagnerberger_empowering_2024,
title = {Empowering Patients: Improve Gender-Sensitive Medical Knowledge Through Interactive Edutainment},
author = {D Wagnerberger and D Schott and L Schwenderling and C Hansen and D Schumacher},
url = {https://dl.acm.org/doi/10.1145/3679318.3685500},
doi = {10.1145/3679318.3685500},
isbn = {979-8-4007-0966-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Proceedings of the 13th Nordic Conference on Human-Computer Interaction},
pages = {1–12},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {NordiCHI '24},
abstract = {Disregarding crucial gender-specific differences and potential risks in medicine leads to widespread gender inequalities. This paper introduces interactive edutainment concepts developed through a user-centered design approach to raise awareness of gender medicine. An interactive exhibition course and an accompanying deck of cards provide an engaging and sensitizing experience of medical gender inequalities. Qualitative feedback, self-assessment, and user experience and behavior were evaluated during a public display of the concepts (n=14). The results highlight the potential of our playful approach to raising awareness among the public as well as health-related professionals, paving new ways for communication and empowerment of patients of all genders. We believe these insights have broader applicability across various domains, supporting efforts to address all forms of inequality.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Polenz, L; Joeres, F; Hansen, C; Heinrich, F
Simulating projective Augmented Reality Visualizations in Virtual Reality: Is VR a feasible Environment for medical AR Evaluations? Proceedings Article
In: Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0331-7.
@inproceedings{polenz_simulating_2024,
title = {Simulating projective Augmented Reality Visualizations in Virtual Reality: Is VR a feasible Environment for medical AR Evaluations?},
author = {L Polenz and F Joeres and C Hansen and F Heinrich},
url = {https://doi.org/10.1145/3613905.3650843},
doi = {10.1145/3613905.3650843},
isbn = {979-8-4007-0331-7},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '24},
abstract = {Augmented Reality (AR) has demonstrated potential in medical applications, such as enhancing surgical navigation. However, evaluating medical AR visualizations entails high costs and effort to provide suitable hardware solutions. This is particularly crucial in projective AR, as these systems require several error-prone calibration and registration steps. This work investigates the suitability of Virtual Reality (VR) as a cost-effective and controlled study environment for evaluating projective AR visualizations. A virtual twin of a real laboratory environment was created, and a user study comparing two needle navigation visualizations was conducted. The study simulated identical experiments in both AR and VR to assess if similar results would emerge. Our findings indicate that both AR and VR experiments exhibited comparable effects in terms of performance and workload of both needle insertion visualizations. This study serves as a preliminary step in demonstrating the feasibility of using VR as an evaluation environment for projective AR visualizations.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Allgaier, M; Huettl, F; Hanke, L; Huber, T; Preim, B; Saalfeld, S; Hansen, C
Gamification Concepts for a VR-based Visuospatial Training for Intraoperative Liver Ultrasound Proceedings Article
In: Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0331-7.
@inproceedings{allgaier_gamification_2024,
title = {Gamification Concepts for a VR-based Visuospatial Training for Intraoperative Liver Ultrasound},
author = {M Allgaier and F Huettl and L Hanke and T Huber and B Preim and S Saalfeld and C Hansen},
url = {https://doi.org/10.1145/3613905.3650736},
doi = {10.1145/3613905.3650736},
isbn = {979-8-4007-0331-7},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '24},
abstract = {Gamification is widely used due to its positive influence on learning by adding emotions and steering behavior. In medical VR training applications, the use of gamification is rare, and when it is implemented, it often lacks thoughtful design decisions and empirical evaluation. Using a VR-based training for intraoperative ultrasound for liver surgery, we analyzed game elements regarding their suitability and examined two in more detail: difficulty levels and a kit, where the user has to assemble a virtual liver using US. In a broad audience study, levels achieved significantly better results regarding enjoyment. Qualitative feedback from medical students directly comparing the elements revealed that they prefer the kit as well as levels for training. Our studies indicate that levels and the more interactive kit improve the learning experience, which could also be taken as a basis for similar VR-based medical training applications.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schröer, S; Alpers, J; Gutberlet, M; Brüsch, I; Rumpel, R; Wacker, F; Hensen, B; Hansen, C
A probabilistic thermal dose model for the estimation of necrosis in MR-guided tumor ablations Journal Article
In: Medical Physics, vol. 51, no. 1, pp. 239–250, 2024, ISSN: 2473-4209, (_eprint: https://aapm.onlinelibrary.wiley.com/doi/pdf/10.1002/mp.16605).
@article{schroer_probabilistic_2024,
title = {A probabilistic thermal dose model for the estimation of necrosis in MR-guided tumor ablations},
author = {S Schröer and J Alpers and M Gutberlet and I Brüsch and R Rumpel and F Wacker and B Hensen and C Hansen},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/mp.16605},
doi = {10.1002/mp.16605},
issn = {2473-4209},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Medical Physics},
volume = {51},
number = {1},
pages = {239–250},
abstract = {Background Monitoring minimally invasive thermal ablation procedures using magnetic resonance (MR) thermometry allows therapy of tumors even close to critical anatomical structures. Unfortunately, intraoperative monitoring remains challenging due to the necessary accuracy and real-time capability. One reason for this is the statistical error introduced by MR measurement, which causes the prediction of ablation zones to become inaccurate. Purpose In this work, we derive a probabilistic model for the prediction of ablation zones during thermal ablation procedures based on the thermal damage model CEM43. By integrating the statistical error caused by MR measurement into the conventional prediction, we hope to reduce the amount of falsely classified voxels. Methods The probabilistic CEM43 model is empirically evaluated using a polyacrylamide gel phantom and three in-vivo pig livers. Results The results show a higher accuracy in three out of four data sets, with a relative difference in Sørensen–Dice coefficient from -3.04% to 3.97% compared to the conventional model. Furthermore, the ablation zones predicted by the probabilistic model show a false positive rate with a relative decrease of 11.89%–30.04% compared to the conventional model. Conclusion The presented probabilistic thermal dose model might help to prevent false classification of voxels within ablation zones. This could potentially result in an increased success rate for MR-guided thermal ablation procedures. Future work may address additional error sources and a follow-up study in a more realistic clinical context.},
note = {_eprint: https://aapm.onlinelibrary.wiley.com/doi/pdf/10.1002/mp.16605},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
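For context on the CEM43 thermal damage model that the entry above builds on, the conventional cumulative-equivalent-minutes formulation (shown here only as a reference sketch; the paper's contribution is a probabilistic extension of it) is

\mathrm{CEM}_{43} = \sum_{i} t_i \cdot R^{\,43 - T_i}, \qquad R = \begin{cases} 0.25, & T_i < 43\,^{\circ}\mathrm{C} \\ 0.50, & T_i \ge 43\,^{\circ}\mathrm{C} \end{cases}

where t_i is the duration of the i-th measurement interval and T_i the temperature (here obtained from MR thermometry) during that interval; voxels whose CEM43 value exceeds a tissue-specific threshold are predicted to undergo necrosis.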

Bashkanov, O; Rak, M; Engelage, L; Hansen, C
Automatic Patient-level Diagnosis of Prostate Disease with Fused 3D MRI and Tabular Clinical Data Proceedings Article
In: Medical Imaging with Deep Learning, pp. 1225–1238, PMLR, 2024, (ISSN: 2640-3498).
@inproceedings{bashkanov_automatic_2024,
title = {Automatic Patient-level Diagnosis of Prostate Disease with Fused 3D MRI and Tabular Clinical Data},
author = {O Bashkanov and M Rak and L Engelage and C Hansen},
url = {https://proceedings.mlr.press/v227/bashkanov24a.html},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Medical Imaging with Deep Learning},
pages = {1225–1238},
publisher = {PMLR},
abstract = {Computer-aided diagnosis systems for automatic prostate cancer diagnosis can provide radiologists with decision support during image reading. However, in this case, patient-relevant information often remains unexploited due to the greater focus on the image recognition side, with various imaging devices and modalities, while omitting other potentially valuable clinical data. Therefore, our work investigates the performance of recent methods for the fusion of rich image data and heterogeneous tabular data. Those data may include patient demographics as well as laboratory data, e.g., prostate-specific antigen (PSA). Experiments on the large dataset (3800 subjects) indicated that when using the fusion method with demographic data in clinically significant prostate cancer (csPCa) detection tasks, the mean area under the receiver operating characteristic curve (ROC AUC) has improved significantly from 0.736 to 0.765. We also observed that the naïve concatenation performs similarly or even better than the state-of-the-art fusion modules. We also achieved better prediction quality in grading prostate disease by including more samples from longitudinal PSA profiles in the tabular feature set. Thus, by including the three last PSA samples per patient, the best-performing model has reached AUC of 0.794 and a quadratic weighted kappa score (QWK) of 0.464, which constitutes a significant improvement compared with the image-only method, with ROC AUC of 0.736 and QWK of 0.342.},
note = {ISSN: 2640-3498},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
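To illustrate the kind of image-tabular fusion discussed in the entry above, the following is a minimal late-fusion sketch in PyTorch in which 3D-image features are naively concatenated with tabular clinical features; the module sizes, layer choices, and feature dimensions are illustrative assumptions, not the architecture evaluated in the paper.

import torch
import torch.nn as nn

class FusionClassifier(nn.Module):
    """Naive late fusion: concatenate 3D image features with tabular clinical features."""
    def __init__(self, n_tabular: int, n_classes: int = 2):
        super().__init__()
        # Placeholder 3D CNN encoder standing in for the imaging backbone.
        self.image_encoder = nn.Sequential(
            nn.Conv3d(1, 16, kernel_size=3, padding=1), nn.ReLU(),
            nn.AdaptiveAvgPool3d(1), nn.Flatten(),      # -> (batch, 16)
        )
        # Embedding for tabular data (e.g., age, longitudinal PSA values).
        self.tabular_encoder = nn.Sequential(nn.Linear(n_tabular, 16), nn.ReLU())
        # Classification head on the concatenated representation.
        self.head = nn.Linear(16 + 16, n_classes)

    def forward(self, image, tabular):
        z = torch.cat([self.image_encoder(image), self.tabular_encoder(tabular)], dim=1)
        return self.head(z)

# Example: two MRI volumes, each with three clinical features.
model = FusionClassifier(n_tabular=3)
logits = model(torch.randn(2, 1, 32, 32, 32), torch.randn(2, 3))

More elaborate fusion modules replace the plain concatenation step; as the abstract notes, they did not consistently outperform it in these experiments.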
2023

Heinrich, F; Bornemann, K; Polenz, L; Lawonn, K; Hansen, C
Clutch & Grasp: Activation gestures and grip styles for device-based interaction in medical spatial augmented reality Journal Article
In: International Journal of Human-Computer Studies, vol. 180, pp. 103117, 2023, ISSN: 1071-5819.
@article{heinrich_clutch_2023,
title = {Clutch & Grasp: Activation gestures and grip styles for device-based interaction in medical spatial augmented reality},
author = {F Heinrich and K Bornemann and L Polenz and K Lawonn and C Hansen},
url = {https://www.sciencedirect.com/science/article/pii/S107158192300126X},
doi = {10.1016/j.ijhcs.2023.103117},
issn = {1071-5819},
year = {2023},
date = {2023-12-01},
urldate = {2023-12-01},
journal = {International Journal of Human-Computer Studies},
volume = {180},
pages = {103117},
abstract = {Presenting medical volume data using augmented reality (AR) can facilitate the identification of anatomical structures, the perception of their spatial relations and the development of mental maps compared to more commonly used monitors. However, interaction methods explored in these conventional settings may not be applicable in AR environments, or perform differently. In terms of mode activation, gestural interaction was shown to be a viable, touchless alternative to traditional input devices, which is desirable in sterile medical use cases. Therefore, we present a user study (n = 21) comparing hand and foot gestures with voice commands for the activation of interaction modes within a projector-based, spatial AR prototype to visualize medical volume data. Interaction itself was performed via hand movements captured by a data glove. Consistent, statistically significant results across measured variables suggest advantages of voice commands. In addition, a second experiment (n = 17) compared the hand-based interaction with two motion-sensitive devices held in power and in precision grip respectively. All modes were activated using voice commands. No considerable differences between tested grip styles could be determined. The findings suggest that the choice of preferable interaction devices is user and use case dependent.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Heinrich, F; Stallmeister, L; Moritz, J; Hensen, B; Hansen, C
Is this the vReal Life? Manipulating Visual Fidelity of Immersive Environments for Medical Task Simulation Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR), pp. 1171–1180, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2838-7.
@inproceedings{schott_is_2023,
title = {Is this the vReal Life? Manipulating Visual Fidelity of Immersive Environments for Medical Task Simulation},
author = {D Schott and F Heinrich and L Stallmeister and J Moritz and B Hensen and C Hansen},
url = {https://ieeexplore.ieee.org/document/10316533/},
doi = {10.1109/ISMAR59233.2023.00134},
isbn = {979-8-3503-2838-7},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
pages = {1171–1180},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Recent developments and research advances contribute to an ever-increasing trend towards quality levels close to what we experience in reality. In this work, we investigate how different degrees of these quality characteristics affect user performance, qualia of user experience (UX), and sense of presence in an example medical task. To this end, a two-way within-subjects design user study was conducted, in which three different levels of visual fidelity were compared. In addition, two different interaction modalities were considered: (1) the use of conventional VR controllers and (2) natural hand interaction using 3D-printed, spatially-registered replicas of medical devices, to interact with their virtual representations. Consistent results indicate that higher degrees of visual fidelity evoke a higher sense of presence and UX. However, user performance was less affected. Moreover, no differences were detected between both interaction modalities for the examined task. Future work should investigate the discovered interaction effects between quality levels and interaction modalities in more detail and examine whether these results can be reproduced in tasks that require more precision. This work provides insights into the implications to consider when studying interactions in VR and paves the way for investigations into early phases of medical product development and workflow analysis.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Mielke, T; Joeres, F; Schott, D; Hansen, C
Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 501–506, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{mielke_interactive_2023,
title = {Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation},
author = {T Mielke and F Joeres and D Schott and C Hansen},
url = {https://ieeexplore.ieee.org/document/10322246/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00109},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {501–506},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Augmented Reality (AR) visualization has shown potential for supporting intuitive and efficient human-robot interaction in a range of tasks. Since all these tasks are spatially related to the robot, the precise positioning of the AR content is critical to the applicability. However, most research has primarily focused on developing visualizations rather than exploring methods for aligning AR content in the robotic workspace. This paper aims to bridge this gap by implementing and comparing different interactive registration methods, including two point-based and one manual approach. We comparatively evaluated these registration methods in a user study (n=21), measuring registration accuracy, duration, and subjective user feedback. Our results indicate that the point-based methods outperform the manual approach in terms of both accuracy and perceived workload. Furthermore, participants achieved significantly faster performance with a point-based approach using physically defined registration points compared to a point-based approach using markers attached to the robot.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
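The point-based registration methods compared in the entry above rest on the standard least-squares rigid alignment of corresponding point pairs; the NumPy sketch below shows that classic SVD-based building block only, not the authors' AR implementation.

import numpy as np

def rigid_register(source: np.ndarray, target: np.ndarray):
    """Least-squares rigid transform (R, t) mapping source points onto target.
    source, target: (N, 3) arrays of corresponding points."""
    src_c, tgt_c = source.mean(axis=0), target.mean(axis=0)
    # Cross-covariance of the centered point sets.
    H = (source - src_c).T @ (target - tgt_c)
    U, _, Vt = np.linalg.svd(H)
    # Reflection handling keeps the result a proper rotation.
    d = np.sign(np.linalg.det(Vt.T @ U.T))
    R = Vt.T @ np.diag([1.0, 1.0, d]) @ U.T
    t = tgt_c - R @ src_c
    return R, t

# Example: recover a known translation from four corresponding points.
pts = np.random.rand(4, 3)
R, t = rigid_register(pts, pts + np.array([0.1, 0.0, -0.2]))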

Allgaier, M; Huettl, F; Hanke, L; Lang, H; Huber, T; Preim, B; Saalfeld, S; Hansen, C
LiVRSono - Virtual Reality Training with Haptics for Intraoperative Ultrasound Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR), pp. 980–989, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2838-7.
@inproceedings{allgaier_livrsono_2023,
title = {LiVRSono - Virtual Reality Training with Haptics for Intraoperative Ultrasound},
author = {M Allgaier and F Huettl and L Hanke and H Lang and T Huber and B Preim and S Saalfeld and C Hansen},
url = {https://ieeexplore.ieee.org/document/10316488/},
doi = {10.1109/ISMAR59233.2023.00114},
isbn = {979-8-3503-2838-7},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
pages = {980–989},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {One of the biggest challenges in using ultrasound (US) is learning to create a spatial mental model of the interior of the scanned object based on the US image and the probe position. As intraoperative ultrasound (IOUS) cannot be easily trained on patients, we present LiVRSono, an immersive VR application to train this skill. The immersive environment, including a US simulation with patient-specific data as well as haptics to support hand-eye coordination, provides a realistic setting. Four clinically relevant training scenarios were identified based on the described learning goal and the workflow of IOUS for liver. The realism of the setting and the training scenarios were evaluated with eleven physicians, of which six participants are experts in IOUS for liver and five participants are potential users of the training system. The setting, handling of the US probe, and US image were considered realistic enough for the learning goal. Regarding the haptic feedback, a limitation is the restricted workspace of the input device. Three of the four training scenarios were rated as meaningful and effective. A pilot study regarding learning outcome shows positive results, especially with respect to confidence and perceived competence. Besides the drawbacks of the input device, our training system provides a realistic learning environment with meaningful scenarios to train the creation of a mental 3D model when performing IOUS. We also identified important improvements to the training scenarios to further enhance the training experience.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Moritz, J; Hansen, C; Joeres, F
The UUXR-Framework: A Draft Classification for Using Extended Reality in Usability and User Experience Research Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 460–465, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{schott_uuxr-framework_2023,
title = {The UUXR-Framework: A Draft Classification for Using Extended Reality in Usability and User Experience Research},
author = {D Schott and J Moritz and C Hansen and F Joeres},
url = {https://ieeexplore.ieee.org/document/10322234/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00100},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {460–465},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Conducting human-centered evaluations in extended reality (XR) environments is a growing trend in user research and usability engineering. However, there has been little to no systematic investigation of the emerging methods in this field published to date. The motivation behind our work is to explore and classify strategies and methods for utilizing XR technologies in the context of usability and user experience (UUX) activities. This paper proposes a draft classification framework for the use of XR technologies in UUX activities, combining an informal exploration of relevant literature with established UUX methods. Within this framework, we propose 12 dimensions that we consider potentially relevant for determining whether and how the use of XR technologies can benefit product development and user research. To evaluate the structure and phrasing of our proposed dimensions, we conducted an initial evaluation with UUX professionals (N = 11). We believe that our dimensions form an early-stage foundation for future guidelines aimed at UUX researchers. The framework serves as a tool for assessing different levels of virtualization in UUX work and facilitating knowledge transfer between academia and industry.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Bashkanov, O; Rak, M; Meyer, A; Engelage, L; Lumiani, A; Muschter, R; Hansen, C
Automatic detection of prostate cancer grades and chronic prostatitis in biparametric MRI Journal Article
In: Computer Methods and Programs in Biomedicine, vol. 239, pp. 107624, 2023, ISSN: 0169-2607.
@article{bashkanov_automatic_2023,
title = {Automatic detection of prostate cancer grades and chronic prostatitis in biparametric MRI},
author = {O Bashkanov and M Rak and A Meyer and L Engelage and A Lumiani and R Muschter and C Hansen},
url = {https://www.sciencedirect.com/science/article/pii/S0169260723002894},
doi = {10.1016/j.cmpb.2023.107624},
issn = {0169-2607},
year = {2023},
date = {2023-09-01},
urldate = {2023-09-01},
journal = {Computer Methods and Programs in Biomedicine},
volume = {239},
pages = {107624},
abstract = {Background and objective: With emerging evidence to improve prostate cancer (PCa) screening, multiparametric magnetic prostate imaging is becoming an essential noninvasive component of the diagnostic routine. Computer-aided diagnostic (CAD) tools powered by deep learning can help radiologists interpret multiple volumetric images. In this work, our objective was to examine promising methods recently proposed in the multigrade prostate cancer detection task and to suggest practical considerations regarding model training in this context. Methods: We collected 1647 fine-grained biopsy-confirmed findings, including Gleason scores and prostatitis, to form a training dataset. In our experimental framework for lesion detection, all models utilized 3D nnU-Net architecture that accounts for anisotropy in the MRI data. First, we explore an optimal range of b-values for diffusion-weighted imaging (DWI) modality and its effect on the detection of clinically significant prostate cancer (csPCa) and prostatitis using deep learning, as the optimal range is not yet clearly defined in this domain. Next, we propose a simulated multimodal shift as a data augmentation technique to compensate for the multimodal shift present in the data. Third, we study the effect of incorporating the prostatitis class alongside cancer-related findings at three different granularities of the prostate cancer class (coarse, medium, and fine) and its impact on the detection rate of the target csPCa. Furthermore, ordinal and one-hot encoded (OHE) output formulations were tested. Results: An optimal model configuration with fine class granularity (prostatitis included) and OHE has scored the lesion-wise partial Free-Response Receiver Operating Characteristic (FROC) area under the curve (AUC) of 1.94 (CI 95%: 1.76–2.11) and patient-wise ROC AUC of 0.874 (CI 95%: 0.793–0.938) in the detection of csPCa. Inclusion of the auxiliary prostatitis class has demonstrated a stable relative improvement in specificity at a false positive rate (FPR) of 1.0 per patient, with an increase of 3%, 7%, and 4% for coarse, medium, and fine class granularities. Conclusions: This paper examines several configurations for model training in the biparametric MRI setup and proposes optimal value ranges. It also shows that the fine-grained class configuration, including prostatitis, is beneficial for detecting csPCa. The ability to detect prostatitis in all low-risk cancer lesions suggests the potential to improve the quality of the early diagnosis of prostate diseases. It also implies an improved interpretability of the results by the radiologist.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Huettl, F; Heinrich, F; Boedecker, C; Vradelis, L; Ludt, A; Kneist, W; Lang, H; Hansen, C; Huber, T
Real-Time Augmented Reality Annotation for Surgical Education during Laparoscopic Surgery: Results from a Single-Center Randomized Controlled Trial and Future Aspects Journal Article
In: Journal of the American College of Surgeons, vol. 237, no. 2, pp. 292, 2023, ISSN: 1879-1190.
@article{huettl_real-time_2023,
title = {Real-Time Augmented Reality Annotation for Surgical Education during Laparoscopic Surgery: Results from a Single-Center Randomized Controlled Trial and Future Aspects},
author = {F Huettl and F Heinrich and C Boedecker and L Vradelis and A Ludt and W Kneist and H Lang and C Hansen and T Huber},
url = {https://journals.lww.com/journalacs/abstract/2023/08000/real_time_augmented_reality_annotation_for.20.aspx},
doi = {10.1097/XCS.0000000000000712},
issn = {1879-1190},
year = {2023},
date = {2023-08-01},
urldate = {2023-08-01},
journal = {Journal of the American College of Surgeons},
volume = {237},
number = {2},
pages = {292},
abstract = {Background: We developed an interactive augmented reality tool (HoloPointer) that enables real-time annotation on a laparoscopy monitor for intraoperative guidance. This application operates exclusively via verbal commands and head movements to ensure a sterile workflow. Study design: The purpose of this randomized controlled clinical trial was to evaluate the integration of this new technology into the operating room. This prospective single-center study included 32 elective laparoscopic cholecystectomies (29 surgical teams, 15 trainees, 13 trainers). Primary objectives and assessment measures were the HoloPointer's influence on surgical performance (subjective assessment, global operative assessment of laparoscopic skills - GOALS, and Critical View of Safety - CVS). Secondary objectives and outcome variables were its influence on operation time, quality of assistance (5-point Likert scale), and user-friendliness (System Usability Scale - SUS, 0-100 points). Results: Gestural corrections were reduced by 59.4% (4.6 SD 8.1 vs. 1.9 SD 4.7; p > 0.05) and verbal corrections by 36.1% (17.8 SD 12.9 vs. 11.4 SD 8.1; p > 0.05). Subjective surgical performance could be improved by 84.6% of participants. No statistically significant differences were observed for the objective parameters GOALS, CVS, and operation time. In the SUS, the application achieved an average score of 72.5 SD 16.3 (good user-friendliness). Of the participants, 69.2% wanted to use the HoloPointer more frequently. Conclusion: The majority of trainees had improved their surgical performance using the HoloPointer in elective laparoscopic cholecystectomies, and the rate of classic but potentially misleading corrections was noticeably reduced. The HoloPointer has the potential to improve education in minimally invasive surgery.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Gulamhussene, G; Rak, M; Bashkanov, O; Joeres, F; Omari, J; Pech, M; Hansen, C
Transfer-learning is a key ingredient to fast deep learning-based 4D liver MRI reconstruction Journal Article
In: Scientific Reports, vol. 13, no. 1, pp. 11227, 2023, ISSN: 2045-2322, (Publisher: Nature Publishing Group).
@article{gulamhussene_transfer-learning_2023,
title = {Transfer-learning is a key ingredient to fast deep learning-based 4D liver MRI reconstruction},
author = {G Gulamhussene and M Rak and O Bashkanov and F Joeres and J Omari and M Pech and C Hansen},
url = {https://www.nature.com/articles/s41598-023-38073-1},
doi = {10.1038/s41598-023-38073-1},
issn = {2045-2322},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
journal = {Scientific Reports},
volume = {13},
number = {1},
pages = {11227},
abstract = {Time-resolved volumetric magnetic resonance imaging (4D MRI) could be used to address organ motion in image-guided interventions like tumor ablation. Current 4D reconstruction techniques are unsuitable for most interventional settings because they are limited to specific breathing phases, lack temporal/spatial resolution, and have long prior acquisitions or reconstruction times. Deep learning-based (DL) 4D MRI approaches promise to overcome these shortcomings but are sensitive to domain shift. This work shows that transfer learning (TL) combined with an ensembling strategy can help alleviate this key challenge. We evaluate four approaches: pre-trained models from the source domain, models directly trained from scratch on target domain data, models fine-tuned from a pre-trained model, and an ensemble of fine-tuned models. For that, the database was split into 16 source and 4 target domain subjects. Comparing an ensemble of fine-tuned models (N = 10) with directly learned models, we report significant improvements (P < 0.001) of the root mean squared error (RMSE) of up to 12% and the mean displacement (MDISP) of up to 17.5%. The smaller the target domain data amount, the larger the effect. This shows that TL + Ens significantly reduces upfront acquisition time and improves reconstruction quality, rendering it a key component in making 4D MRI clinically feasible for the first time in the context of 4D organ motion models of the liver and beyond.},
note = {Publisher: Nature Publishing Group},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
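A compact sketch of the transfer-learning-plus-ensembling recipe described above is given below; the model, optimizer settings, and data loader are generic placeholders rather than the authors' code.

import copy
import torch

def fine_tune(pretrained, target_loader, epochs=5, lr=1e-4):
    """Fine-tune a copy of a source-domain model on sparse target-domain data."""
    model = copy.deepcopy(pretrained)
    optim = torch.optim.Adam(model.parameters(), lr=lr)
    loss_fn = torch.nn.MSELoss()
    for _ in range(epochs):
        for navigator, data_slice in target_loader:   # (input, ground-truth) pairs
            optim.zero_grad()
            loss = loss_fn(model(navigator), data_slice)
            loss.backward()
            optim.step()
    return model

def ensemble_predict(models, navigator):
    """Average the predictions of several independently fine-tuned models."""
    with torch.no_grad():
        return torch.stack([m(navigator) for m in models]).mean(dim=0)

# Example with a dummy one-layer "model" and a tiny target-domain set.
# (In practice, ensemble members would differ, e.g., via different fine-tuning subsets or seeds.)
dummy = torch.nn.Linear(8, 8)
loader = [(torch.randn(4, 8), torch.randn(4, 8))]
models = [fine_tune(dummy, loader) for _ in range(3)]
prediction = ensemble_predict(models, torch.randn(4, 8))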

Schwenderling, L; Kleinau, A; Herbrich, W; Kasireddy, H; Heinrich, F; Hansen, C
Activation modes for gesture-based interaction with a magic lens in AR anatomy visualisation Journal Article
In: Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization, vol. 11, no. 4, pp. 1243–1250, 2023, ISSN: 2168-1163, (Publisher: Taylor & Francis).
@article{schwenderling_activation_2023,
title = {Activation modes for gesture-based interaction with a magic lens in AR anatomy visualisation},
author = {L Schwenderling and A Kleinau and W Herbrich and H Kasireddy and F Heinrich and C Hansen},
url = {https://doi.org/10.1080/21681163.2022.2157749},
doi = {10.1080/21681163.2022.2157749},
issn = {2168-1163},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
journal = {Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization},
volume = {11},
number = {4},
pages = {1243–1250},
abstract = {Learning human anatomy is key for health-related education and often requires expensive and time-consuming cadaver dissection courses. Augmented reality (AR) for the representation of spatially registered 3D models can be used as a low-cost and flexible alternative. However, suitable visualisation and interaction approaches are needed to display multilayered anatomy data. This paper features a spherical volumetric AR Magic Lens controlled by mid-air hand gestures to explore the human anatomy on a phantom. Defining how gestures control associated actions is important for intuitive interaction. Therefore, two gesture activation modes were investigated in a user study (n = 24). Performing the gestures once to toggle actions showed a higher interaction count since an additional stop gesture was used. Holding the gestures was favoured in the qualitative feedback. Both modes showed similar performance in terms of accuracy and task completion time. Overall, direct gesture manipulation of a magic lens for anatomy visualisation is, thus, recommended.},
note = {Publisher: Taylor & Francis},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Kunz, M; Wunderling, T; Heinrich, F; Braun-Dullaeus, R; Hansen, C
CardioGenesis4D: Interactive Morphological Transitions of Embryonic Heart Development in a Virtual Learning Environment Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 29, no. 5, pp. 2615–2625, 2023, ISSN: 1941-0506.
@article{schott_cardiogenesis4d_2023,
title = {CardioGenesis4D: Interactive Morphological Transitions of Embryonic Heart Development in a Virtual Learning Environment},
author = {D Schott and M Kunz and T Wunderling and F Heinrich and R Braun-Dullaeus and C Hansen},
url = {https://ieeexplore.ieee.org/document/10049681},
doi = {10.1109/TVCG.2023.3247110},
issn = {1941-0506},
year = {2023},
date = {2023-05-01},
urldate = {2023-05-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {29},
number = {5},
pages = {2615–2625},
abstract = {In the embryonic human heart, complex dynamic shape changes take place in a short period of time on a microscopic scale, making this development difficult to visualize. However, spatial understanding of these processes is essential for students and future cardiologists to properly diagnose and treat congenital heart defects. Following a user centered approach, the most crucial embryological stages were identified and translated into a virtual reality learning environment (VRLE) to enable the understanding of the morphological transitions of these stages through advanced interactions. To address individual learning types, we implemented different features and evaluated the application regarding usability, perceived task load, and sense of presence in a user study. We also assessed spatial awareness and knowledge gain, and finally obtained feedback from domain experts. Overall, students and professionals rated the application positively. To minimize distraction from interactive learning content, such VRLEs should consider features for different learning types, allow for gradual habituation, and at the same time provide enough playful stimuli. Our work previews how VR can be integrated into a cardiac embryology education curriculum.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Chheang, V; Bruggemann, R; Preim, B; Hansen, C
Virtual Resection Planning using Bezier Surface Interactions in Collaborative VR Environments Proceedings Article
In: 2023 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 166–169, 2023.
@inproceedings{chheang_virtual_2023,
title = {Virtual Resection Planning using Bezier Surface Interactions in Collaborative VR Environments},
author = {V Chheang and R Bruggemann and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/10108900},
doi = {10.1109/VRW58643.2023.00041},
year = {2023},
date = {2023-03-01},
urldate = {2023-03-01},
booktitle = {2023 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {166–169},
abstract = {The use of virtual reality (VR) has been proposed for collaborative planning or training scenarios to aid surgeons in preparing surgical interventions. In this paper, we explore the usage of cubic Bezier surfaces and their respective interactions in collaborative VR environments to define atypical resection surfaces for planning abdominal organ tumor resections. We conducted a pilot study (n = 10) to evaluate the usability of the Bezier surface interaction compared to a free deformation approach using the example of liver surgery planning. Moreover, we showed the prototype to an expert and conducted an interview to collect clinical feedback. The results show potential benefits of both interaction techniques that could be essential for virtual resection planning and training. The expert highlighted that combining both techniques could speed up the workflow and provide various options for planning.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
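As background for the cubic Bézier surfaces used in the entry above, a bicubic patch is evaluated from a 4 x 4 control-point grid with cubic Bernstein polynomials; the NumPy sketch below is illustrative only, not the interaction code from the paper.

import numpy as np

def bernstein3(t: float) -> np.ndarray:
    """Cubic Bernstein basis B_0..B_3 evaluated at t in [0, 1]."""
    return np.array([(1 - t) ** 3,
                     3 * t * (1 - t) ** 2,
                     3 * t ** 2 * (1 - t),
                     t ** 3])

def bezier_patch(control: np.ndarray, u: float, v: float) -> np.ndarray:
    """Point on a bicubic Bezier patch; control has shape (4, 4, 3)."""
    bu, bv = bernstein3(u), bernstein3(v)
    # S(u, v) = sum_i sum_j B_i(u) * B_j(v) * P_ij
    return np.einsum('i,j,ijk->k', bu, bv, control)

# Example: a flat 4 x 4 grid in the xy-plane, sampled at the patch centre.
grid = np.stack(np.meshgrid(np.linspace(0, 1, 4), np.linspace(0, 1, 4), indexing='ij'), axis=-1)
control = np.concatenate([grid, np.zeros((4, 4, 1))], axis=-1)
centre = bezier_patch(control, 0.5, 0.5)   # -> approximately [0.5, 0.5, 0.0]

Dragging a control point deforms the patch locally, which is what makes such surfaces attractive for sketching atypical resection planes interactively.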

Sühn, T; Esmaeili, N; Mattepu, S; Spiller, M; Boese, A; Urrutia, R; Poblete, V; Hansen, C; Lohmann, C; Illanes, A; Friebe, M
Vibro-Acoustic Sensing of Instrument Interactions as a Potential Source of Texture-Related Information in Robotic Palpation Journal Article
In: Sensors, vol. 23, no. 6, pp. 3141, 2023, ISSN: 1424-8220, (Publisher: Multidisciplinary Digital Publishing Institute).
@article{suhn_vibro-acoustic_2023,
title = {Vibro-Acoustic Sensing of Instrument Interactions as a Potential Source of Texture-Related Information in Robotic Palpation},
author = {T Sühn and N Esmaeili and S Mattepu and M Spiller and A Boese and R Urrutia and V Poblete and C Hansen and C Lohmann and A Illanes and M Friebe},
url = {https://www.mdpi.com/1424-8220/23/6/3141},
doi = {10.3390/s23063141},
issn = {1424-8220},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Sensors},
volume = {23},
number = {6},
pages = {3141},
abstract = {The direct tactile assessment of surface textures during palpation is an essential component of open surgery that is impeded in minimally invasive and robot-assisted surgery. When indirectly palpating with a surgical instrument, the structural vibrations from this interaction contain tactile information that can be extracted and analysed. This study investigates the influence of the parameters contact angle α and velocity v→ on the vibro-acoustic signals from this indirect palpation. A 7-DOF robotic arm, a standard surgical instrument, and a vibration measurement system were used to palpate three different materials with varying α and v→. The signals were processed based on continuous wavelet transformation. They showed material-specific signatures in the time–frequency domain that retained their general characteristic for varying α and v→. Energy-related and statistical features were extracted, and supervised classification was performed, where the testing data comprised only signals acquired with different palpation parameters than for training data. The classifiers support vector machine and k-nearest neighbours provided 99.67% and 96.00% accuracy for the differentiation of the materials. The results indicate the robustness of the features against variations in the palpation parameters. This is a prerequisite for an application in minimally invasive surgery but needs to be confirmed in realistic experiments with biological tissues.},
note = {Publisher: Multidisciplinary Digital Publishing Institute},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Gulamhussene, G; Das, A; Spiegel, J; Punzet, D; Rak, M; Hansen, C
Needle Tip Tracking During CT-guided Interventions using Fuzzy Segmentation Proceedings Article
In: Deserno, T; Handels, H; Maier, A; Maier-Hein, K; Palm, C; Tolxdorff, T (Ed.): Bildverarbeitung für die Medizin 2023, pp. 285–291, Springer Fachmedien, Wiesbaden, 2023, ISBN: 978-3-658-41657-7.
@inproceedings{gulamhussene_needle_2023,
title = {Needle Tip Tracking During CT-guided Interventions using Fuzzy Segmentation},
author = {G Gulamhussene and A Das and J Spiegel and D Punzet and M Rak and C Hansen},
editor = {T Deserno and H Handels and A Maier and K Maier-Hein and C Palm and T Tolxdorff},
doi = {10.1007/978-3-658-41657-7_62},
isbn = {978-3-658-41657-7},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
booktitle = {Bildverarbeitung für die Medizin 2023},
pages = {285–291},
publisher = {Springer Fachmedien},
address = {Wiesbaden},
abstract = {CT-guided interventions are standard practice for radiologists to treat lesions in various parts of the human body. In this context, accurate tracking of instruments is of paramount importance for the safety of the procedure and helps radiologists avoid unintended damage to adjacent organs. In this work, a novel method for the estimation of 3D needle tip coordinates in a CT volume using only two 2D projections in an interventional setting is proposed. The method applies a deep learning model for the fuzzy segmentation of the region containing the tip on 2D projections and automatically extracts the position of the tip. A simple UNet achieves a Dice score of 0.9906 for the fuzzy segmentation and an average euclidean distance of 2.96 mm for the needle tip regression task.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Urrutia, R; Espejo, D; Evens, N; Guerra, M; Sühn, T; Boese, A; Hansen, C; Fuentealba, P; Illanes, A; Poblete, V
Clustering Methods for Vibro-Acoustic Sensing Features as a Potential Approach to Tissue Characterisation in Robot-Assisted Interventions Journal Article
In: Sensors, vol. 23, no. 23, pp. 9297, 2023, ISSN: 1424-8220, (Publisher: Multidisciplinary Digital Publishing Institute).
@article{urrutia_clustering_2023,
title = {Clustering Methods for Vibro-Acoustic Sensing Features as a Potential Approach to Tissue Characterisation in Robot-Assisted Interventions},
author = {R Urrutia and D Espejo and N Evens and M Guerra and T Sühn and A Boese and C Hansen and P Fuentealba and A Illanes and V Poblete},
url = {https://www.mdpi.com/1424-8220/23/23/9297},
doi = {10.3390/s23239297},
issn = {1424-8220},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Sensors},
volume = {23},
number = {23},
pages = {9297},
abstract = {This article provides a comprehensive analysis of the feature extraction methods applied to vibro-acoustic signals (VA signals) in the context of robot-assisted interventions. The primary objective is to extract valuable information from these signals to understand tissue behaviour better and build upon prior research. This study is divided into three key stages: feature extraction using the Cepstrum Transform (CT), Mel-Frequency Cepstral Coefficients (MFCCs), and Fast Chirplet Transform (FCT); dimensionality reduction employing techniques such as Principal Component Analysis (PCA), t-Distributed Stochastic Neighbour Embedding (t-SNE), and Uniform Manifold Approximation and Projection (UMAP); and, finally, classification using a nearest neighbours classifier. The results demonstrate that using feature extraction techniques, especially the combination of CT and MFCC with dimensionality reduction algorithms, yields highly efficient outcomes. The classification metrics (Accuracy, Recall, and F1-score) approach 99%, and the clustering metric is 0.61. The performance of the CT–UMAP combination stands out in the evaluation metrics.},
note = {Publisher: Multidisciplinary Digital Publishing Institute},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
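The feature-extraction and dimensionality-reduction pipeline evaluated above can be approximated with off-the-shelf libraries; the snippet below (synthetic signals, arbitrary parameters, MFCC features only) is an illustrative recombination of MFCCs, UMAP, and a nearest-neighbours classifier, not the authors' implementation.

import numpy as np
import librosa
import umap
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
sr = 16000

# Synthetic stand-ins for vibro-acoustic recordings of two "materials".
signals = [rng.normal(scale=s, size=sr) for s in (0.5, 1.0) for _ in range(20)]
labels = np.repeat([0, 1], 20)

# One averaged MFCC vector per recording.
features = np.array([librosa.feature.mfcc(y=sig, sr=sr, n_mfcc=13).mean(axis=1)
                     for sig in signals])

# Non-linear dimensionality reduction, then k-NN classification.
embedded = umap.UMAP(n_components=2, random_state=0).fit_transform(features)
X_tr, X_te, y_tr, y_te = train_test_split(embedded, labels, test_size=0.25, random_state=0)
clf = KNeighborsClassifier(n_neighbors=3).fit(X_tr, y_tr)
print("accuracy:", clf.score(X_te, y_te))

For a proper evaluation the reducer would be fitted on training data only; it is applied to the whole set here purely for brevity.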

Gulamhussene, G; Spiegel, J; Das, A; Rak, M; Hansen, C
Deep Learning-based Marker-less Pose Estimation of Interventional Tools using Surrogate Keypoints Proceedings Article
In: Deserno, T; Handels, H; Maier, A; Maier-Hein, K; Palm, C; Tolxdorff, T (Ed.): Bildverarbeitung für die Medizin 2023, pp. 292–298, Springer Fachmedien, Wiesbaden, 2023, ISBN: 978-3-658-41657-7.
@inproceedings{gulamhussene_deep_2023,
title = {Deep Learning-based Marker-less Pose Estimation of Interventional Tools using Surrogate Keypoints},
author = {G Gulamhussene and J Spiegel and A Das and M Rak and C Hansen},
editor = {T Deserno and H Handels and A Maier and K Maier-Hein and C Palm and T Tolxdorff},
doi = {10.1007/978-3-658-41657-7_63},
isbn = {978-3-658-41657-7},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
booktitle = {Bildverarbeitung für die Medizin 2023},
pages = {292–298},
publisher = {Springer Fachmedien},
address = {Wiesbaden},
abstract = {Estimating the position of an intervention needle is an important ability in computer-assisted interventions. Currently, such pose estimations rely either on radiation-intensive CT imaging or need additional optical markers which add overhead to the clinical workflow. We propose a novel deep-learning-based technique for pose estimation of interventional tools which relies on detecting visible features on the tool itself without additional markers. We also propose a novel and fast pipeline for creating vast amounts of robustly labeled and markerless ground truth data for training such neural networks. Initial evaluations suggest that with needle base and needle tip localization errors of about 1 and 4 cm, our approach can yield a search corridor that can be used to find the needle in a low-dose CT image, reducing radiation exposure.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Gulamhussene, G; Bashkanov, O; Omari, J; Pech, M; Hansen, C; Rak, M
Using Training Samples as Transitive Information Bridges in Predicted 4D MRI Proceedings Article
In: Xue, Z; Antani, S; Zamzmi, G; Yang, F; Rajaraman, S; Huang, S; Linguraru, M; Liang, Z (Ed.): Medical Image Learning with Limited and Noisy Data, pp. 237–245, Springer Nature Switzerland, Cham, 2023, ISBN: 978-3-031-44917-8.
@inproceedings{gulamhussene_using_2023,
title = {Using Training Samples as Transitive Information Bridges in Predicted 4D MRI},
author = {G Gulamhussene and O Bashkanov and J Omari and M Pech and C Hansen and M Rak},
editor = {Z Xue and S Antani and G Zamzmi and F Yang and S Rajaraman and S Huang and M Linguraru and Z Liang},
doi = {10.1007/978-3-031-44917-8_23},
isbn = {978-3-031-44917-8},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
booktitle = {Medical Image Learning with Limited and Noisy Data},
pages = {237–245},
publisher = {Springer Nature Switzerland},
address = {Cham},
abstract = {The lack of real-time techniques for monitoring respiratory motion impairs the development of guidance systems for image-guided interventions. Recent works show that U-Net based real-time 4D MRI prediction methods are promising, but prone to bad image quality when small training data sets and inputs with multiple MR contrast are used. To overcome this problem, we propose a more efficient use of the sparse training data and re-utilize 2D training samples as a secondary input for construction of transitive information bridges between the navigator slice primary input and the data slice prediction. We thus remove the need for a separate 3D breath-hold MRI with different MR contrast as the secondary input. Results show that our novel construction leads to improved prediction quality with very sparse training data, with a significant decrease in root mean squared error (RMSE) from 0.3 to 0.27 (p < 2.2e-16).},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}