Dr. Fabian Joeres
Short Bio
I am a senior researcher and lecturer with a passion for medical devices, human-machine systems, and XR (in no particular order). My background zigzags across multiple engineering disciplines, with a Bachelor's in Mechanical Engineering, a Master's and industry experience in Human Factors, and a Doctorate in Computer Science. This gives me a holistic, in-depth view of complex human-machine systems: it allows me to develop a methodologically sound and empathetic understanding of users' perspectives and needs, as well as a profound understanding of the technical side of these systems. Within the VAR group and the Research Campus STIMULATE, I teach human-computer interaction and conduct research in the broader field of human-computer interaction in medical settings.
Find me also here: Google Scholar | ORCID | ResearchGate | LinkedIn
Research Interests
I am interested in complex human-computer systems in medical and other high-risk use environments. My current research foci are:
- XR for facilitating medical human-robot interaction
- Effective and efficient use of XR for usability engineering and UX research
- Interactive navigation visualisations in image-guided needle interventions
Publications
2025

Joeres, F; Paetz, T; Hansen, C; Schenk, A
The necessity of parallel needle placement for tumor ablation using irreversible electroporation: a myth? Proceedings Article
In: CARS 2025—Computer Assisted Radiology and Surgery Proceedings of the 39th International Congress and Exhibition, Berlin, Germany, June 17–20, 2025, pp. 90–91, 2025.
@inproceedings{Joeres.2025,
title = {The necessity of parallel needle placement for tumor ablation using irreversible electroporation: a myth?},
author = {F Joeres and T Paetz and C Hansen and A Schenk},
url = {https://link.springer.com/article/10.1007/s11548-025-03373-6},
doi = {10.1007/s11548-025-03373-6},
year = {2025},
date = {2025-05-29},
urldate = {2025-05-29},
booktitle = {CARS 2025—Computer Assisted Radiology and Surgery Proceedings of the 39th International Congress and Exhibition Berlin, Germany, June 17–20, 2025},
pages = {90–91},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Hanke, L; Holst, U; Huettl, F; Joeres, F; Huber, T; Hansen, C
Toward structured abdominal examination training using augmented reality Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 5, pp. 949–958, 2025, ISSN: 1861-6429.
@article{schwenderling_toward_2025,
title = {Toward structured abdominal examination training using augmented reality},
author = {L Schwenderling and L Hanke and U Holst and F Huettl and F Joeres and T Huber and C Hansen},
url = {https://doi.org/10.1007/s11548-024-03311-y},
doi = {10.1007/s11548-024-03311-y},
issn = {1861-6429},
year = {2025},
date = {2025-05-01},
urldate = {2025-05-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {20},
number = {5},
pages = {949–958},
abstract = {Structured abdominal examination is an essential part of the medical curriculum and surgical training, requiring a blend of theory and practice from trainees. Current training methods, however, often do not provide adequate engagement, fail to address individual learning needs or do not cover rare diseases.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schwenderling, L; Schotte, M; Joeres, F; Heinrich, F; Hanke, L; Huettl, F; Huber, T; Hansen, C
Teach Me Where to Look: Dual-task Attention Training in Augmented Reality Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama, Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{schwenderling_teach_2025,
title = {Teach Me Where to Look: Dual-task Attention Training in Augmented Reality},
author = {L Schwenderling and M Schotte and F Joeres and F Heinrich and L Hanke and F Huettl and T Huber and C Hansen},
url = {https://dl.acm.org/doi/10.1145/3706599.3720198},
doi = {10.1145/3706599.3720198},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {ACM},
address = {Yokohama Japan},
abstract = {Regular eye contact is essential in medicine to recognize signs of pain. However, it is difficult to remember this during training as attention is tied up in learning. While augmented reality (AR) has shown promising results for medical education, there is no training for attention allocation yet. Therefore, three auditory and three visual attention guidance tools in AR are evaluated for their use in medical dual-task training settings. In expert reviews with six participants in human-computer interaction and medical didactics, advantages, disadvantages, and refinements for the cues were developed. For visual cues, an overt but less occluding cue was preferred for constant visibility of the primary task. A more diegetic cue design was proposed for the auditory cues to use a patient simulation as a reminder of the regular face glance. In general, several cues were found to be suitable for gaze guidance training, requiring only minor changes for improvement.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Herbrich, W; Zittlau, P; Joeres, F; Hansen, C
Prototype development of a cross-reality digital twin ecosystem: the web, open source and open data Proceedings Article
In: 2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 459–462, 2025.
@inproceedings{herbrich_prototype_2025,
title = {Prototype development of a cross-reality digital twin ecosystem: the web, open source and open data},
author = {W Herbrich and P Zittlau and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/10972895},
doi = {10.1109/VRW66409.2025.00100},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
booktitle = {2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {459–462},
abstract = {This work contributes to a broader initiative aimed at transforming a former industrial port area into a dynamic Knowledge Transfer Space (KTS). To support this transformation, we explore the development of a cross-reality (CR) digital twin of the port area, which integrates user interfaces with varying degrees of virtuality. We evaluate different web technologies, focusing on the balance between accessibility, immersion, scalability, and performance. By comparing client-side rendering with pixel streaming approaches, we aim to identify suitable solutions for prototyping a CR digital twin ecosystem. The development of a prototype is ongoing, based on a client-side rendering approach. The outcomes contribute to developing an open and transferable CR digital twin applicable to similar urban projects in other cities.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2024

Joeres, F; Zittlau, P; Herbrich, W; Heinrich, F; Rose, G; Hansen, C
Concept development of a cross-reality ecosystem for urban knowledge transfer spaces Proceedings Article
In: 2024 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 166–169, 2024, (ISSN: 2771-1110).
@inproceedings{joeres_concept_2024,
title = {Concept development of a cross-reality ecosystem for urban knowledge transfer spaces},
author = {F Joeres and P Zittlau and W Herbrich and F Heinrich and G Rose and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/10765174},
doi = {10.1109/ISMAR-Adjunct64951.2024.00043},
year = {2024},
date = {2024-10-01},
urldate = {2024-10-01},
booktitle = {2024 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {166–169},
abstract = {This paper presents the development of a cross-reality (CR) ecosystem designed for an urban knowledge transfer space (KTS) in a post-industrial urban environment. The project is part of a larger initiative aimed at transforming a former industrial river port into a dynamic KTS, facilitating interactions between scientific, commercial, residential, and cultural stakeholders. Our research explores the potential of multimodal mixed reality (XR) technologies to enhance engagement with the content and stakeholders of the KTS. Through a three-phase process, we identified key stakeholders and their target audiences, selected appropriate XR technologies, and developed initial use cases that integrate web applications, mobile augmented reality (AR), and XR head-mounted displays. The preliminary findings indicate that these technologies can effectively cater to diverse user groups, providing different levels of virtuality and interaction. However, challenges remain, particularly in stakeholder engagement and the evolving nature of the KTS initiative. Ongoing work includes the development of a Web-XR-based prototype, which will be iteratively refined to better meet user needs and adapt to future technological advancements. This research contributes to the understanding of how CR technologies can be employed in urban transformation processes, offering insights into the design of flexible and scalable CR ecosystems.},
note = {ISSN: 2771-1110},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Herbrich, W; Joeres, F; Hansen, C
A Novel Framework for Hand Visualization in Web-Based Collaborative XR Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 18–23, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schwenderling_novel_2024,
title = {A Novel Framework for Hand Visualization in Web-Based Collaborative XR},
author = {L Schwenderling and W Herbrich and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536317/},
doi = {10.1109/VRW62533.2024.00010},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {18–23},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Many extended reality (XR) applications are platform-specific, making accessibility and cross-platform collaboration difficult. Web-based collaborative XR can enhance adoption of XR technologies, using the browser as a platform-independent interface. However, challenges arise from the browser environment, such as performance limitations. To this end, we present a WebXR-based framework for hand interaction in cross-platform collaboration in XR. A network structure and methods for collaborative and individual object manipulation complement the integrated hand tracking. Three different fidelity levels to represent the hands of remote users were implemented to accommodate different performance capabilities. Concepts ranged from virtual hands to discrete poses with abstract objects. A sample application was implemented with a puzzle task. Two users collaborated in the browsers of the Microsoft HoloLens 2 and the Meta Quest 2. Qualitative and quantitative data on user performance (n=9), and frame rate recordings (n=1) were collected. All users were able to solve the puzzle together quickly and intuitively. The Quest environment was preferred, as there were more performance issues with the HoloLens. Hand interaction was well-received and proved to be sufficient as the only form of communication. Simpler representations of the hands lead to a higher frame rate, whereby the effects were device-dependent. The impact on task performance was low. Hand interaction enables an intuitive exchange of objects and basic communication in cross-platform collaboration via browsers. Depending on the XR environment, however, device-specific performance limitations must be taken into account by modulating the amount of data and rendering effort.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Polenz, L; Joeres, F; Hansen, C; Heinrich, F
Simulating projective Augmented Reality Visualizations in Virtual Reality: Is VR a feasible Environment for medical AR Evaluations? Proceedings Article
In: Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0331-7.
@inproceedings{polenz_simulating_2024,
title = {Simulating projective Augmented Reality Visualizations in Virtual Reality: Is VR a feasible Environment for medical AR Evaluations?},
author = {L Polenz and F Joeres and C Hansen and F Heinrich},
url = {https://doi.org/10.1145/3613905.3650843},
doi = {10.1145/3613905.3650843},
isbn = {979-8-4007-0331-7},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '24},
abstract = {Augmented Reality (AR) has demonstrated potential in medical applications, such as enhancing surgical navigation. However, evaluating medical AR visualizations entails high costs and effort to provide suitable hardware solutions. This is particularly crucial in projective AR, as these systems require several error-prone calibration and registration steps. This work investigates the suitability of Virtual Reality (VR) as a cost-effective and controlled study environment for evaluating projective AR visualizations. A virtual twin of a real laboratory environment was created, and a user study comparing two needle navigation visualizations was conducted. The study simulated identical experiments in both AR and VR to assess if similar results would emerge. Our findings indicate that both AR and VR experiments exhibited comparable effects in terms of performance and workload of both needle insertion visualizations. This study serves as a preliminary step in demonstrating the feasibility of using VR as an evaluation environment for projective AR visualizations.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2023

Mielke, T; Joeres, F; Schott, D; Hansen, C
Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 501–506, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{mielke_interactive_2023,
title = {Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation},
author = {T Mielke and F Joeres and D Schott and C Hansen},
url = {https://ieeexplore.ieee.org/document/10322246/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00109},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {501–506},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Augmented Reality (AR) visualization has shown potential for supporting intuitive and efficient human-robot interaction in a range of tasks. Since all these tasks are spatially related to the robot, the precise positioning of the AR content is critical to the applicability. However, most research has primarily focused on developing visualizations rather than exploring methods for aligning AR content in the robotic workspace. This paper aims to bridge this gap by implementing and comparing different interactive registration methods, including two point-based and one manual approach. We comparatively evaluated these registration methods in a user study (n=21), measuring registration accuracy, duration, and subjective user feedback. Our results indicate that the point-based methods outperform the manual approach in terms of both accuracy and perceived workload. Furthermore, participants achieved significantly faster performance with a point-based approach using physically defined registration points compared to a point-based approach using markers attached to the robot.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Moritz, J; Hansen, C; Joeres, F
The UUXR-Framework: A Draft Classification for Using Extended Reality in Usability and User Experience Research Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 460–465, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{schott_uuxr-framework_2023,
title = {The UUXR-Framework: A Draft Classification for Using Extended Reality in Usability and User Experience Research},
author = {D Schott and J Moritz and C Hansen and F Joeres},
url = {https://ieeexplore.ieee.org/document/10322234/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00100},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {460–465},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Conducting human-centered evaluations in extended reality (XR) environments is a growing trend in user research and usability engineering. However, there has been little to no systematic investigation of the emerging methods in this field published to date. The motivation behind our work is to explore and classify strategies and methods for utilizing XR technologies in the context of usability and user experience (UUX) activities. This paper proposes a draft classification framework for the use of XR technologies in UUX activities, combining an informal exploration of relevant literature with established UUX methods. Within this framework, we propose 12 dimensions that we consider potentially relevant for determining whether and how the use of XR technologies can benefit product development and user research. To evaluate the structure and phrasing of our proposed dimensions, we conducted an initial evaluation with UUX professionals (N = 11). We believe that our dimensions form an early-stage foundation for future guidelines aimed at UUX researchers. The framework serves as a tool for assessing different levels of virtualization in UUX work and facilitating knowledge transfer between academia and industry.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Gulamhussene, G; Rak, M; Bashkanov, O; Joeres, F; Omari, J; Pech, M; Hansen, C
Transfer-learning is a key ingredient to fast deep learning-based 4D liver MRI reconstruction Journal Article
In: Scientific Reports, vol. 13, no. 1, pp. 11227, 2023, ISSN: 2045-2322, (Publisher: Nature Publishing Group).
@article{gulamhussene_transfer-learning_2023,
title = {Transfer-learning is a key ingredient to fast deep learning-based 4D liver MRI reconstruction},
author = {G Gulamhussene and M Rak and O Bashkanov and F Joeres and J Omari and M Pech and C Hansen},
url = {https://www.nature.com/articles/s41598-023-38073-1},
doi = {10.1038/s41598-023-38073-1},
issn = {2045-2322},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
journal = {Scientific Reports},
volume = {13},
number = {1},
pages = {11227},
abstract = {Time-resolved volumetric magnetic resonance imaging (4D MRI) could be used to address organ motion in image-guided interventions like tumor ablation. Current 4D reconstruction techniques are unsuitable for most interventional settings because they are limited to specific breathing phases, lack temporal/spatial resolution, and have long prior acquisitions or reconstruction times. Deep learning-based (DL) 4D MRI approaches promise to overcome these shortcomings but are sensitive to domain shift. This work shows that transfer learning (TL) combined with an ensembling strategy can help alleviate this key challenge. We evaluate four approaches: pre-trained models from the source domain, models directly trained from scratch on target domain data, models fine-tuned from a pre-trained model and an ensemble of fine-tuned models. For that the data base was split into 16 source and 4 target domain subjects. Comparing ensemble of fine-tuned models (N = 10) with directly learned models, we report significant improvements (P < 0.001) of the root mean squared error (RMSE) of up to 12% and the mean displacement (MDISP) of up to 17.5%. The smaller the target domain data amount, the larger the effect. This shows that TL + Ens significantly reduces beforehand acquisition time and improves reconstruction quality, rendering it a key component in making 4D MRI clinically feasible for the first time in the context of 4D organ motion models of the liver and beyond.},
note = {Publisher: Nature Publishing Group},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2022

Schreiter, J; Schott, D; Schwenderling, L; Hansen, C; Heinrich, F; Joeres, F
AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future Journal Article
In: Journal of Imaging, vol. 8, no. 10, pp. 255, 2022, ISSN: 2313-433X, (Publisher: Multidisciplinary Digital Publishing Institute).
@article{schreiter_ar-supported_2022,
title = {AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future},
author = {J Schreiter and D Schott and L Schwenderling and C Hansen and F Heinrich and F Joeres},
url = {https://www.mdpi.com/2313-433X/8/10/255},
doi = {10.3390/jimaging8100255},
issn = {2313-433X},
year = {2022},
date = {2022-10-01},
urldate = {2022-10-01},
journal = {Journal of Imaging},
volume = {8},
number = {10},
pages = {255},
abstract = {Robotic assistance is applied in orthopedic interventions for pedicle screw placement (PSP). While current robots do not act autonomously, they are expected to have higher autonomy under surgeon supervision in the mid-term. Augmented reality (AR) is promising to support this supervision and to enable human–robot interaction (HRI). To outline a futuristic scenario for robotic PSP, the current workflow was analyzed through literature review and expert discussion. Based on this, a hypothetical workflow of the intervention was developed, which additionally contains the analysis of the necessary information exchange between human and robot. A video see-through AR prototype was designed and implemented. A robotic arm with an orthopedic drill mock-up simulated the robotic assistance. The AR prototype included a user interface to enable HRI. The interface provides data to facilitate understanding of the robot’s ”intentions”, e.g., patient-specific CT images, the current workflow phase, or the next planned robot motion. Two-dimensional and three-dimensional visualization illustrated patient-specific medical data and the drilling process. The findings of this work contribute a valuable approach in terms of addressing future clinical needs and highlighting the importance of AR support for HRI.},
note = {Publisher: Multidisciplinary Digital Publishing Institute},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Heinrich, F; Schwenderling, L; Joeres, F; Hansen, C
2D versus 3D: A Comparison of Needle Navigation Concepts between Augmented Reality Display Devices Proceedings Article
In: 2022 IEEE Conference on Virtual Reality and 3D User Interfaces (VR), pp. 260–269, 2022, (ISSN: 2642-5254).
@inproceedings{heinrich_2d_2022,
title = {2D versus 3D: A Comparison of Needle Navigation Concepts between Augmented Reality Display Devices},
author = {F Heinrich and L Schwenderling and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/document/9756753},
doi = {10.1109/VR51125.2022.00045},
year = {2022},
date = {2022-03-01},
urldate = {2022-03-01},
booktitle = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)},
pages = {260–269},
abstract = {Surgical procedures requiring needle navigation assistance suffer from complicated hand-eye coordination and are mentally demanding. Augmented reality (AR) can help overcome these issues. However, only an insufficient amount of fundamental research has focused on the design and hardware selection of such AR needle navigation systems. This work contributes to this research area by presenting a user study (n=24) comparing three state-of-the-art navigation concepts displayed by an optical see-through head-mounted display and a stereoscopic projection system. A two-dimensional glyph visualization resulted in higher targeting accuracy but required more needle insertion time. In contrast, punctures guided by a three-dimensional see-through vision concept were less accurate but faster and were favored in a qualitative interview. The third concept, a static representation of the correctly positioned needle, showed too high target errors for clinical accuracy needs. This concept performed worse when displayed by the projection system. Besides that, no meaningful differences between the evaluated AR display devices were detected. User preferences and use case restrictions, e.g., sterility requirements, seem to be more crucial selection criteria. Future work should focus on improving the accuracy of the see-through vision concept. Until then, the glyph visualization is recommended.},
note = {ISSN: 2642-5254},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Chheang, V; Heinrich, F; Joeres, F; Saalfeld, P; Preim, B; Hansen, C
Group WiM: A Group Navigation Technique for Collaborative Virtual Reality Environments Proceedings Article
In: 2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 556–557, 2022.
@inproceedings{chheang_group_2022,
title = {Group WiM: A Group Navigation Technique for Collaborative Virtual Reality Environments},
author = {V Chheang and F Heinrich and F Joeres and P Saalfeld and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/9757426},
doi = {10.1109/VRW55335.2022.00129},
year = {2022},
date = {2022-03-01},
urldate = {2022-03-01},
booktitle = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {556–557},
abstract = {In this work, we present a group World-in-Miniature (WiM) navigation technique that allows a guide to navigate a team in collaborative virtual reality (VR) environments. We evaluated the usability, discomfort, and user performance of the proposed technique compared to state-of-the-art group teleportation in a user study (n = 21). The results show that the proposed technique induces less discomfort for the guide and has slight usability advantages. Additionally, the group WiM technique seems superior in regards to task completion time for obstructed target destination. However, it performs similarly to the group teleportation technique in direct line of sight cases. The group WiM technique provides potential benefits for effective group navigation in complex virtual environments and harder-to-reach target locations.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Mielke, T; Joeres, F; Hansen, C
Natural 3D Object Manipulation for Interactive Laparoscopic Augmented Reality Registration Proceedings Article
In: Chen, Jessie Y. C.; Fragomeni, Gino (Ed.): Virtual, Augmented and Mixed Reality: Design and Development, pp. 317–328, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-05939-1.
@inproceedings{mielke_natural_2022,
title = {Natural 3D Object Manipulation for Interactive Laparoscopic Augmented Reality Registration},
author = {T Mielke and F Joeres and C Hansen},
editor = {Jessie Y. C. Chen and Gino Fragomeni},
doi = {10.1007/978-3-031-05939-1_21},
isbn = {978-3-031-05939-1},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Virtual, Augmented and Mixed Reality: Design and Development},
pages = {317–328},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {Due to the growing focus on minimally invasive surgery, there is increasing interest in intraoperative software support. For example, augmented reality can be used to provide additional information. Accurate registration is required for effective support. In this work, we present a manual registration method that aims at mimicking natural manipulation of 3D objects using tracked surgical instruments. This method is compared to a point-based registration method in a simulated laparoscopic environment. Both registration methods serve as an initial alignment step prior to surface-based registration refinement. For the evaluation, we conducted a user study with 12 participants. The registration methods were compared in terms of registration accuracy, registration duration, and subjective usability feedback. No significant differences could be found with respect to the previously mentioned criteria between the manual and the point-based registration methods. Thus, the manual registration did not outperform the reference method. However, we found that our method offers qualitative advantages, which may make it more suitable for some application scenarios. Furthermore we identified possible approaches for improvement, which should be investigated in the future to strengthen possible advantages of our registration method.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2021

Chheang, V; Saalfeld, P; Joeres, F; Boedecker, C; Huber, T; Huettl, F; Lang, H; Preim, B; Hansen, C
A collaborative virtual reality environment for liver surgery planning Journal Article
In: Computers & Graphics, vol. 99, pp. 234–246, 2021, ISSN: 0097-8493.
@article{chheang_collaborative_2021,
title = {A collaborative virtual reality environment for liver surgery planning},
author = {V Chheang and P Saalfeld and F Joeres and C Boedecker and T Huber and F Huettl and H Lang and B Preim and C Hansen},
url = {https://www.sciencedirect.com/science/article/pii/S0097849321001400},
doi = {10.1016/j.cag.2021.07.009},
issn = {0097-8493},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
journal = {Computers & Graphics},
volume = {99},
pages = {234–246},
abstract = {Surgical planning software is a key component in the treatment of tumor diseases. However, desktop-based systems provide only limited visualization and interaction opportunities. Moreover, collaborative planning among members of a surgical team is only possible to a limited extent. In this work, a collaborative virtual reality (VR) environment to assist liver surgeons in tumor surgery planning is presented. Our aim is to improve virtual resection planning between surgeons in a remote or co-located environment. The system allows surgeons to define and adjust virtual resections on patient-specific organ 3D surfaces and 2D image slices. Changes on both modalities are synchronized, which will enable surgeons to iterate and refine the resection surfaces quickly. In addition, a real-time risk map visualization is presented that displays safety margins around tumors. An evaluation performed by liver surgeons provides information on potential benefits, such as the possibility to visualize complex cases and assessing the safety-critical areas, applicability, and limitations for further improvement.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Wagner, S; Belger, J; Joeres, F; Thöne-Otto, A; Hansen, C; Preim, B; Saalfeld, P
iVRoad: Immersive virtual road crossing as an assessment tool for unilateral spatial neglect Journal Article
In: Computers & Graphics, vol. 99, pp. 70–82, 2021, ISSN: 0097-8493.
@article{wagner_ivroad_2021,
title = {iVRoad: Immersive virtual road crossing as an assessment tool for unilateral spatial neglect},
author = {S Wagner and J Belger and F Joeres and A Thöne-Otto and C Hansen and B Preim and P Saalfeld},
url = {https://www.sciencedirect.com/science/article/pii/S009784932100128X},
doi = {10.1016/j.cag.2021.06.013},
issn = {0097-8493},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
journal = {Computers & Graphics},
volume = {99},
pages = {70–82},
abstract = {We developed a virtual road crossing assessment tool called iVRoad - immersive Virtual Road, which allows to put the patient into realistic road crossing situations and to record various parameters that can be used to quantify unilateral spatial neglect. We present a study with 18 stroke patients in which we evaluate our system with respect to usability, satisfaction, sense of presence and possible occurring cybersickness symptoms. Unilateral spatial neglect is a cognitive disturbance, often occurring after right hemispheric stroke. Conventional neuropsychological tests, such as paper-and-pencil tests, for assessing unilateral spatial neglect, often lack sensitivity. Especially in mild forms, symptoms can be seen in everyday life, but are hard to detect in formal testing. We examined patients with and without unilateral spatial neglect in order to identify parameters that could be feasible to separate these patient groups. Using everyday life tasks as a diagnostic instrument, however, is challenging because it is time-consuming, hard to control and to quantify. Computer-aided diagnostic systems are promising for analysing the behaviour of patients in detail. Modern virtual reality technology allows to place the patient in realistic situations. Especially situations in which patients often have difficulties or that are too dangerous in reality can be assessed with VR.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Joeres, F; Mielke, T; Hansen, C
Laparoscopic augmented reality registration for oncological resection site repair Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 16, no. 9, pp. 1577–1586, 2021, ISSN: 1861-6429.
@article{joeres_laparoscopic_2021,
title = {Laparoscopic augmented reality registration for oncological resection site repair},
author = {F Joeres and T Mielke and C Hansen},
url = {https://doi.org/10.1007/s11548-021-02336-x},
doi = {10.1007/s11548-021-02336-x},
issn = {1861-6429},
year = {2021},
date = {2021-09-01},
urldate = {2021-09-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {16},
number = {9},
pages = {1577–1586},
abstract = {Resection site repair during laparoscopic oncological surgery (e.g. laparoscopic partial nephrectomy) poses some unique challenges and opportunities for augmented reality (AR) navigation support. This work introduces an AR registration workflow that addresses the time pressure that is present during resection site repair.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Joeres, F; Heinrich, F; Schott, D; Hansen, C
Towards natural 3D interaction for laparoscopic augmented reality registration Journal Article
In: Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization, vol. 9, no. 4, pp. 384–391, 2021, ISSN: 2168-1163, (Publisher: Taylor & Francis).
@article{joeres_towards_2021,
title = {Towards natural 3D interaction for laparoscopic augmented reality registration},
author = {F Joeres and F Heinrich and D Schott and C Hansen},
url = {https://doi.org/10.1080/21681163.2020.1834877},
doi = {10.1080/21681163.2020.1834877},
issn = {2168-1163},
year = {2021},
date = {2021-07-01},
urldate = {2021-07-01},
journal = {Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization},
volume = {9},
number = {4},
pages = {384–391},
abstract = {Augmented reality (AR) is a widely researched route for navigation support in laparoscopic surgery. Accurate registration is a crucial component for such AR systems. We introduce two methods for interactive registration that aim to be minimally invasive to the workflow and to mimic natural manipulation of 3D objects. The methods utilise spatially tracked laparoscopic tools to manipulate the virtual 3D content. We comparatively evaluated the methods against a reference, landmark-based registration method in a user study with 12 participants. We tested the methods for registration accuracy, time, and subjective usability perception. Our methods did not outperform the reference method on these parameters but showed promising results. The results indicate that our methods present no finalised solutions but that one of them is a promising approach for which we identified concrete improvement measures to be implemented in future research.},
note = {Publisher: Taylor & Francis, _eprint: https://doi.org/10.1080/21681163.2020.1834877},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Saalfeld, P; Schmidt, G; Joeres, F; Boedecker, C; Huettl, F; Lang, H; Huber, T; Preim, B; Hansen, C
A VR/AR Environment for Multi-User Liver Anatomy Education Proceedings Article
In: 2021 IEEE Virtual Reality and 3D User Interfaces (VR), pp. 296–305, IEEE, Lisboa, Portugal, 2021, ISBN: 978-1-6654-1838-6.
@inproceedings{schott_vrar_2021,
title = {A VR/AR Environment for Multi-User Liver Anatomy Education},
author = {D Schott and P Saalfeld and G Schmidt and F Joeres and C Boedecker and F Huettl and H Lang and T Huber and B Preim and C Hansen},
url = {https://ieeexplore.ieee.org/document/9417662/},
doi = {10.1109/VR50410.2021.00052},
isbn = {978-1-6654-1838-6},
year = {2021},
date = {2021-03-01},
urldate = {2021-03-01},
booktitle = {2021 IEEE Virtual Reality and 3D User Interfaces (VR)},
pages = {296–305},
publisher = {IEEE},
address = {Lisboa, Portugal},
abstract = {We present a Virtual and Augmented Reality multi-user prototype of a learning environment for liver anatomy education. Our system supports various training scenarios ranging from small learning groups to classroom-size education, where students and teachers can participate in virtual reality, augmented reality, or via desktop PCs. In an iterative development process with surgeons and teachers, a virtual organ library was created. Nineteen liver data sets were used comprising 3D surface models, 2D image data, pathology information, diagnosis and treatment decisions. These data sets can interactively be sorted and investigated individually regarding their volumetric and meta information. The three participation modes were evaluated within a user study with surgery lecturers (5) and medical students (5). We assessed the usability and presence using questionnaires. Additionally, we collected qualitative data with semistructured interviews. A total of 435 individual statements were recorded and summarized to 49 statements. The results show that our prototype is usable, induces presence, and potentially support the teaching of liver anatomy and surgery in the future.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schreiter, J; Joeres, F; March, C; Pech, M; Hansen, C
Application Potential of Robot-Guided Ultrasound During CT-Guided Interventions Proceedings Article
In: Noble, J; Aylward, S; Grimwood, A; Min, Z; Lee, S; Hu, Y (Ed.): Simplifying Medical Ultrasound, pp. 116–125, Springer International Publishing, Cham, 2021, ISBN: 978-3-030-87583-1.
@inproceedings{schreiter_application_2021,
title = {Application Potential of Robot-Guided Ultrasound During CT-Guided Interventions},
author = {J Schreiter and F Joeres and C March and M Pech and C Hansen},
editor = {J Noble and S Aylward and A Grimwood and Z Min and S Lee and Y Hu},
doi = {10.1007/978-3-030-87583-1_12},
isbn = {978-3-030-87583-1},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Simplifying Medical Ultrasound},
pages = {116–125},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {CT-guided interventions are common practices in interventional radiology to treat oncological conditions. During these interventions, radiologists are exposed to radiation and faced with a non-ergonomic working environment. A robot-guided ultrasound (US) as a complementing imaging method for the purpose of needle guidance could help to overcome these challenges. A survey with 21 radiologists was made to analyze the application potential of US during CT-guided interventions with regard to anatomical regions to be scanned as locations of target lesions as well as specific situations during which US could complement CT imaging. The results indicate that the majority of respondents already applied US during CT-guided interventions for reasons of real-time imaging of the target lesion, organ, and needle movement as well as for lesions that are difficult to visualize in CT. Potential situations of US application were identified as out-of-plane needle insertion and puncturing lesions within the liver and subcutaneous lymph nodes. Interaction with a robot-guided US should be intuitive and include an improved sterility concept.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Joeres, F; Black, D; Razavizadeh, S; Hansen, C
Audiovisual AR concepts for laparoscopic subsurface structure navigation Journal Article
In: 2021.
@article{joeres_audiovisual_2021,
title = {Audiovisual AR concepts for laparoscopic subsurface structure navigation},
author = {F Joeres and D Black and S Razavizadeh and C Hansen},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
abstract = {The identification of subsurface structures during resection wound repair is a challenge during minimally invasive partial nephrectomy. Specifically, major blood vessels and branches of the urinary collecting system need to be localized under time pressure as target or risk structures during suture placement. This work presents concepts for AR visualization and auditory guidance based on tool position that support this task. We evaluated the concepts in a laboratory user study with a simplified, simulated task: The localization of subsurface target points in a healthy kidney phantom. We evaluated the task time, localization accuracy, and perceived workload for our concepts and a control condition without navigation support. The AR visualization improved the accuracy and perceived workload over the control condition. We observed similar, non-significant trends for the auditory display. Further, clinically realistic evaluation is pending. Our initial results indicate the potential benefits of our concepts in supporting laparoscopic resection wound repair.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Joeres, F
Augmented reality in minimally invasive partial nephrectomy PhD Thesis
2021.
@phdthesis{joeres_augmented_2021,
title = {Augmented reality in minimally invasive partial nephrectomy},
author = {F Joeres},
url = {https://opendata.uni-halle.de/bitstream/1981185920/63598/1/Joeres_Fabian_Dissertation_2021.pdf},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
abstract = {Laparoscopic and robot-assisted surgery offers a range of clinical benefits for patients, most notably smaller wounds and an overall reduction in surgical trauma. At the same time, it creates new and additional physical and cognitive challenges for surgeons. The field of computer-assisted surgery aims, among other things, to support surgeons in meeting these additional challenges. Minimally invasive partial nephrectomy is a procedure affected by these challenges; owing to its surgical complexity, it can only be performed by specialised and experienced urological surgeons. Its goal is the laparoscopic or robot-assisted resection of kidney tumours while preserving as much renal function as possible. Over the past decades, a large body of research has been produced in computer-assisted, minimally invasive surgery. Many of these works use augmented reality (AR) to provide relevant information intraoperatively, with the aim of making the respective operation safer, more effective, or more efficient. The majority of this work focuses on technical system aspects, such as the accuracy and reliability of surgical AR systems. A user-centred consideration of such systems, however, is a prerequisite for developing safe and effective surgical assistance systems. This dissertation therefore conducts a user-centred investigation of the overarching research question: How can AR navigation support the surgical treatment of kidney tumours? The first question arising in this context is: What additional information does the surgeon need? The dissertation uses minimally invasive partial nephrectomy as an exemplary case study for a second research question: Can a targeted investigation of information needs open up new research areas for surgical navigation assistance? To this end, the information needs arising during minimally invasive partial nephrectomy were investigated using cognitive task analysis methods, and the results were compared with the current state of research by means of a systematic literature review. These analyses identified three surgical phases of minimally invasive partial nephrectomy that can particularly benefit from AR assistance: the preparation and management of the renal blood vessels prior to tumour resection, the tumour resection itself, and the repair of the resection wound. The systematic literature review found no dedicated solutions for assistance during resection wound repair, a phase that poses several specific technological challenges for the development of AR systems. Within this dissertation, an AR registration process for this phase, as well as general interactive registration methods for laparoscopic AR, were therefore developed. In addition, an interactive, audiovisual AR concept for navigation during resection wound repair was developed. These solution concepts were implemented as early research prototypes and tested in laboratory-based user studies. The work reported in this dissertation opens up a new application area for AR navigation in minimally invasive partial nephrectomy and presents solution concepts for this application. Further work is required to integrate these solution concepts into a more mature assistance system and, ultimately, to make it usable for the benefit of patients.},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
2020

Heinrich, F; Schwenderling, L; Joeres, F; Lawonn, K; Hansen, C
Comparison of Augmented Reality Display Techniques to Support Medical Needle Insertion Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 26, no. 12, pp. 3568–3575, 2020, ISSN: 1941-0506.
@article{heinrich_comparison_2020,
title = {Comparison of Augmented Reality Display Techniques to Support Medical Needle Insertion},
author = {F Heinrich and L Schwenderling and F Joeres and K Lawonn and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/9211732},
doi = {10.1109/TVCG.2020.3023637},
issn = {1941-0506},
year = {2020},
date = {2020-12-01},
urldate = {2020-12-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {26},
number = {12},
pages = {3568–3575},
abstract = {Augmented reality (AR) may be a useful technique to overcome issues of conventionally used navigation systems supporting medical needle insertions, like increased mental workload and complicated hand-eye coordination. Previous research primarily focused on the development of AR navigation systems designed for specific displaying devices, but differences between employed methods have not been investigated before. To this end, a user study involving a needle insertion task was conducted comparing different AR display techniques with a monitor-based approach as baseline condition for the visualization of navigation information. A video see-through stationary display, an optical see-through head-mounted display and a spatial AR projector-camera-system were investigated in this comparison. Results suggest advantages of using projected navigation information in terms of lower task completion time, lower angular deviation and affirmative subjective participant feedback. Techniques requiring the intermediate view on screens, i.e. the stationary display and the baseline condition, showed less favorable results. Thus, benefits of providing AR navigation information compared to a conventionally used method could be identified. Significant objective measures results, as well as an identification of advantages and disadvantages of individual display techniques contribute to the development and design of improved needle navigation systems.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Gulamhussene, G; Joeres, F; Rak, M; Pech, M; Hansen, C
4D MRI: Robust sorting of free breathing MRI slices for use in interventional settings Journal Article
In: PLOS ONE, vol. 15, no. 6, pp. e0235175, 2020, ISSN: 1932-6203, (Publisher: Public Library of Science).
@article{gulamhussene_4d_2020,
title = {4D MRI: Robust sorting of free breathing MRI slices for use in interventional settings},
author = {G Gulamhussene and F Joeres and M Rak and M Pech and C Hansen},
url = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0235175},
doi = {10.1371/journal.pone.0235175},
issn = {1932-6203},
year = {2020},
date = {2020-06-01},
urldate = {2020-06-01},
journal = {PLOS ONE},
volume = {15},
number = {6},
pages = {e0235175},
abstract = {Purpose We aim to develop a robust 4D MRI method for large FOVs enabling the extraction of irregular respiratory motion that is readily usable with all MRI machines and thus applicable to support a wide range of interventional settings. Method We propose a 4D MRI reconstruction method to capture an arbitrary number of breathing states. It uses template updates in navigator slices and search regions for fast and robust vessel cross-section tracking. It captures FOVs of 255 mm x 320 mm x 228 mm at a spatial resolution of 1.82 mm x 1.82 mm x 4mm and temporal resolution of 200ms. A total of 37 4D MRIs of 13 healthy subjects were reconstructed to validate the method. A quantitative evaluation of the reconstruction rate and speed of both the new and baseline method was performed. Additionally, a study with ten radiologists was conducted to assess the subjective reconstruction quality of both methods. Results Our results indicate improved mean reconstruction rates compared to the baseline method (79.4% vs. 45.5%) and improved mean reconstruction times (24s vs. 73s) per subject. Interventional radiologists perceive the reconstruction quality of our method as higher compared to the baseline (262.5 points vs. 217.5 points},
note = {Publisher: Public Library of Science},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schott, D; Hatscher, B; Joeres, F; Gabele, M; Hußlein, S; Hansen, C
Lean-Interaction: passive image manipulation in concurrent multitasking Journal Article
In: 2020.
@article{schott_lean-interaction_2020,
title = {Lean-Interaction: passive image manipulation in concurrent multitasking},
author = {D Schott and B Hatscher and F Joeres and M Gabele and S Hußlein and C Hansen},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
abstract = {Complex bi-manual tasks often benefit from supporting visual information and guidance. Controlling the system that provides this information is a secondary task that forces the user to perform concurrent multitasking, which in turn may affect the main task performance. Interactions based on natural behavior are a promising solution to this challenge. We investigated the performance of these interactions in a hands-free image manipulation task during a primary manual task with an upright stance. Essential tasks were extracted from the example of clinical workflow and turned into an abstract simulation to gain general insights into how different interaction techniques impact the user’s performance and workload. The interaction techniques we compared were full-body movements, facial expression, gesture and speech input. We found that leaning as an interaction technique facilitates significantly faster image manipulation at lower subjective workloads than facial expression. Our results pave the way towards efficient, natural, hands-free interaction in a challenging multitasking environment.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
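As a rough illustration of the leaning technique that performed best in this study, the sketch below maps a signed forward/backward torso lean angle to a hands-free image scrolling speed with a dead zone, so that normal postural sway does not trigger manipulation. The angle thresholds and gain are purely hypothetical and not the parameters used in the study.

# Hypothetical mapping from torso lean angle to a hands-free scrolling command.
# Dead zone, maximum lean, and speed are illustrative assumptions.
def lean_to_scroll(lean_deg, dead_zone_deg=3.0, max_lean_deg=15.0, max_speed=5.0):
    """Map a signed lean angle (degrees) to a signed scroll speed (e.g., image slices per second)."""
    if abs(lean_deg) < dead_zone_deg:
        return 0.0  # upright enough: no image manipulation
    sign = 1.0 if lean_deg > 0 else -1.0
    magnitude = min(abs(lean_deg), max_lean_deg) - dead_zone_deg
    return sign * max_speed * magnitude / (max_lean_deg - dead_zone_deg)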
2019

Wagner, S; Joeres, F; Gabele, M; Hansen, C; Preim, B; Saalfeld, P
Difficulty factors for VR cognitive rehabilitation training – Crossing a virtual road Journal Article
In: Computers & Graphics, vol. 83, pp. 11–22, 2019, ISSN: 0097-8493.
@article{wagner_difficulty_2019,
title = {Difficulty factors for VR cognitive rehabilitation training – Crossing a virtual road},
author = {S Wagner and F Joeres and M Gabele and C Hansen and B Preim and P Saalfeld},
url = {https://www.sciencedirect.com/science/article/pii/S0097849319301037},
doi = {10.1016/j.cag.2019.06.009},
issn = {0097-8493},
year = {2019},
date = {2019-10-01},
urldate = {2019-10-01},
journal = {Computers & Graphics},
volume = {83},
pages = {11–22},
abstract = {Patients with cognitive or visual impairments have problems in dealing with complex situations. During the rehabilitation process, it is important to confront the patient with (everyday) tasks that have increasing degrees of difficulty to improve their performance. Immersive virtual reality training offers the potential to create a better transfer to daily life than non-immersive computer training. In cooperation with two neuropsychologists, an immersive virtual environment (VE) was developed in which cognitive training in the form of safe road crossing decisions can be performed. We present the experimental exploration and evaluation of difficulty factors within such a VR-based cognitive rehabilitation program. Four difficulty factors were identified and compared (number of relevant traffic lanes, speed of vehicles, distance between vehicles, and number of vehicles). The combination of these difficulty factors resulted in 36 training scenarios. The impact of the factors on participant performance and subjective perception of scenario difficulty was evaluated with 60 healthy participants to estimate the impact of the four factors on a situation’s difficulty level. For the factors Relevant Lanes and Traffic Speed, a clear influence on the perceived task difficulty could be determined. No clear influence could be found for the Gap Size. The Number of Vehicles had almost no effect on the perceived task difficulty. Finally, we asked two experienced neuropsychologists about the applicability of our developed system to patients, and they stated that the system is ready for a study on patients.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
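The 36 training scenarios mentioned in the abstract arise from combining the levels of the four difficulty factors. The sketch below shows such a factorial combination; the concrete level values are assumptions chosen only so that the product yields 36 scenarios, and are not the values used in the study.

# Enumerating training scenarios as combinations of difficulty-factor levels.
# Level values are illustrative assumptions (3 x 3 x 2 x 2 = 36 scenarios).
from itertools import product

relevant_lanes = [1, 2, 3]        # number of relevant traffic lanes
traffic_speed_kmh = [30, 50, 70]  # speed of vehicles
gap_size_s = [3.0, 5.0]           # temporal distance between vehicles
n_vehicles = [5, 10]              # number of vehicles

scenarios = [
    {"lanes": l, "speed_kmh": s, "gap_s": g, "vehicles": v}
    for l, s, g, v in product(relevant_lanes, traffic_speed_kmh, gap_size_s, n_vehicles)
]
print(len(scenarios))  # 36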

Joeres, F; Schindele, D; Luz, M; Blaschke, S; Russwinkel, N; Schostak, M; Hansen, C
How well do software assistants for minimally invasive partial nephrectomy meet surgeon information needs? A cognitive task analysis and literature review study Journal Article
In: PLOS ONE, vol. 14, no. 7, pp. e0219920, 2019, ISSN: 1932-6203, (Publisher: Public Library of Science).
@article{joeres_how_2019,
title = {How well do software assistants for minimally invasive partial nephrectomy meet surgeon information needs? A cognitive task analysis and literature review study},
author = {F Joeres and D Schindele and M Luz and S Blaschke and N Russwinkel and M Schostak and C Hansen},
url = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0219920},
doi = {10.1371/journal.pone.0219920},
issn = {1932-6203},
year = {2019},
date = {2019-07-01},
urldate = {2019-07-01},
journal = {PLOS ONE},
volume = {14},
number = {7},
pages = {e0219920},
abstract = {Introduction: Intraoperative software assistance is gaining increasing importance in laparoscopic and robot-assisted surgery. Within the user-centred development process of such systems, the first question to be asked is: What information does the surgeon need and when does he or she need it? In this article, we present an approach to investigate these surgeon information needs for minimally invasive partial nephrectomy and compare these needs to the relevant surgical computer assistance literature. Materials and methods: First, we conducted a literature-based hierarchical task analysis of the surgical procedure. This task analysis was taken as a basis for a qualitative in-depth interview study with nine experienced surgical urologists. The study employed a cognitive task analysis method to elicit surgeons' information needs during minimally invasive partial nephrectomy. Finally, a systematic literature search was conducted to review proposed software assistance solutions for minimally invasive partial nephrectomy. The review focused on what information the solutions present to the surgeon and what phase of the surgery they aim to support. Results: The task analysis yielded a workflow description for minimally invasive partial nephrectomy. During the subsequent interview study, we identified three challenging phases of the procedure, which may particularly benefit from software assistance. These phases are I. Hilar and vascular management, II. Tumour excision, and III. Repair of the renal defects. Between these phases, 25 individual challenges were found which define the surgeon information needs. The literature review identified 34 relevant publications, all of which aim to support the surgeon in hilar and vascular management (phase I) or tumour excision (phase II). Conclusion: The work presented in this article identified unmet surgeon information needs in minimally invasive partial nephrectomy. Namely, our results suggest that future solutions should address the repair of renal defects (phase III) or put more focus on the renal collecting system as a critical anatomical structure.},
note = {Publisher: Public Library of Science},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Heinrich, F; Joeres, F; Lawonn, K; Hansen, C
Comparison of Projective Augmented Reality Concepts to Support Medical Needle Insertion Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 25, no. 6, pp. 2157–2167, 2019, ISSN: 1077-2626, 1941-0506, 2160-9306.
@article{heinrich_comparison_2019,
title = {Comparison of Projective Augmented Reality Concepts to Support Medical Needle Insertion},
author = {F Heinrich and F Joeres and K Lawonn and C Hansen},
url = {https://ieeexplore.ieee.org/document/8667734/},
doi = {10.1109/TVCG.2019.2903942},
issn = {1077-2626, 1941-0506, 2160-9306},
year = {2019},
date = {2019-06-01},
urldate = {2019-06-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {25},
number = {6},
pages = {2157–2167},
abstract = {Augmented reality (AR) is a promising tool to improve instrument navigation in needle-based interventions. Limited research has been conducted regarding suitable navigation visualizations. In this work, three navigation concepts based on existing approaches were compared in a user study using a projective AR setup. Each concept was implemented with three different scales for accuracy-to-color mapping and two methods of navigation indicator scaling. Participants were asked to perform simulated needle insertion tasks with each of the resulting 18 prototypes. Insertion angle and insertion depth accuracies were measured and analyzed, as well as task completion time and participants’ subjectively perceived task difficulty. Results show a clear ranking of visualization concepts across variables. Less consistent results were obtained for the color and indicator scaling factors. Results suggest that logarithmic indicator scaling achieved better accuracy, but participants perceived it to be more difficult than linear scaling. With specific results for angle and depth accuracy, our study contributes to the future composition of improved navigation support and systems for precise needle insertion or similar applications.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
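One of the design dimensions compared in this study is how a navigation indicator is scaled with the current error (linear vs. logarithmic). The sketch below illustrates that difference in the abstract's terms; the error range and normalisation are assumptions for illustration, not the study's implementation.

# Illustrative linear vs. logarithmic scaling of a navigation indicator.
# The maximum error and normalisation are assumptions, not the study's parameters.
import math

def indicator_scale(error_deg, max_error_deg=30.0, mode="linear"):
    """Return a normalised indicator size in [0, 1] for a given angular error."""
    e = min(max(error_deg, 0.0), max_error_deg)
    if mode == "linear":
        return e / max_error_deg
    # Logarithmic scaling: small errors still produce a clearly visible change.
    return math.log1p(e) / math.log1p(max_error_deg)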

Heinrich, F; Joeres, F; Lawonn, K; Hansen, C
Effects of Accuracy-to-Colour Mapping Scales on Needle Navigation Aids visualised by Projective Augmented Reality Journal Article
In: 2019.
@article{heinrich_eects_2019,
title = {Effects of Accuracy-to-Colour Mapping Scales on Needle Navigation Aids visualised by Projective Augmented Reality},
author = {F Heinrich and F Joeres and K Lawonn and C Hansen},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
abstract = {Instrument navigation in needle-based interventions can benefit from augmented reality (AR) visualisation. Design aspects of these visualisations have been investigated to a limited degree. This work examined colour-specific parameters for AR instrument navigation that have not been successfully researched before. Three different mapping methods to encode accuracy information to colour and two colour scales varying different colour channels were evaluated in a user study. Angular and depth accuracy of inserted needles were measured and task difficulty was subjectively rated. Result trends indicate benefits of mapping accuracy to discrete colours based on thresholds and of using single-hue colour scales that vary in the luminance or saturation channel. Yet, more research is required to validate these indications. This work can constitute a valuable basis for this.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
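The two colour-related parameters examined here can be illustrated with a small sketch: a discrete accuracy-to-colour mapping based on thresholds, and a continuous single-hue scale that varies only the luminance channel. Thresholds, hue, and value ranges below are assumptions for illustration, not the values evaluated in the study.

# Illustrative accuracy-to-colour mappings: discrete thresholds vs. a single-hue luminance scale.
# All numeric values are assumptions, not the study's parameters.
import colorsys

def discrete_colour(error_mm, thresholds=(2.0, 5.0)):
    """Map an error to one of three discrete colours based on thresholds (RGB in [0, 1])."""
    good, acceptable = thresholds
    if error_mm <= good:
        return (0.0, 1.0, 0.0)  # green
    if error_mm <= acceptable:
        return (1.0, 1.0, 0.0)  # yellow
    return (1.0, 0.0, 0.0)      # red

def single_hue_luminance(error_mm, max_error_mm=10.0, hue=0.33):
    """Continuous single-hue scale: hue stays green, luminance decreases with growing error."""
    e = min(max(error_mm, 0.0), max_error_mm) / max_error_mm
    return colorsys.hsv_to_rgb(hue, 1.0, 1.0 - 0.8 * e)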