Position: PhD Candidate
Phone: +49 391 67-59349
E-Mail: schwenderling@isg.cs.ovgu.de
Office: G82 - 167
I am a research scientist focusing on the use and design of augmented reality in medical education. I studied medical engineering (B.Sc.) and computer science (M.Sc.) at the Otto von Guericke University Magdeburg. Now I am working towards my dissertation at the intersection of these disciplines.
| Current position | Research Scientist in Computer Science, Otto von Guericke University Magdeburg |
|---|---|
| Academic background | M.Sc. in Computer Science, Otto von Guericke University Magdeburg |
| | B.Sc. in Medical Engineering, Otto von Guericke University Magdeburg |
Extended Reality / Mixed Reality / Augmented Reality • Augmented Education • Medical Education • Human-Computer Interaction

Schwenderling, L; Hanke, L; Holst, U; Huettl, F; Joeres, F; Huber, T; Hansen, C
Toward structured abdominal examination training using augmented reality Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 5, pp. 949–958, 2025, ISSN: 1861-6429.
@article{schwenderling_toward_2025,
title = {Toward structured abdominal examination training using augmented reality},
author = {L Schwenderling and L Hanke and U Holst and F Huettl and F Joeres and T Huber and C Hansen},
url = {https://doi.org/10.1007/s11548-024-03311-y},
doi = {10.1007/s11548-024-03311-y},
issn = {1861-6429},
year = {2025},
date = {2025-05-01},
urldate = {2025-05-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {20},
number = {5},
pages = {949–958},
abstract = {Structured abdominal examination is an essential part of the medical curriculum and surgical training, requiring a blend of theory and practice from trainees. Current training methods, however, often do not provide adequate engagement, fail to address individual learning needs or do not cover rare diseases.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schwenderling, L; Schotte, M; Joeres, F; Heinrich, F; Hanke, L; Huettl, F; Huber, T; Hansen, C
Teach Me Where to Look: Dual-task Attention Training in Augmented Reality Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama, Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{schwenderling_teach_2025,
title = {Teach Me Where to Look: Dual-task Attention Training in Augmented Reality},
author = {L Schwenderling and M Schotte and F Joeres and F Heinrich and L Hanke and F Huettl and T Huber and C Hansen},
url = {https://dl.acm.org/doi/10.1145/3706599.3720198},
doi = {10.1145/3706599.3720198},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {ACM},
address = {Yokohama, Japan},
abstract = {Regular eye contact is essential in medicine to recognize signs of pain. However, it is difficult to remember this during training as attention is tied up in learning. While augmented reality (AR) has shown promising results for medical education, there is no training for attention allocation yet. Therefore, three auditory and three visual attention guidance tools in AR are evaluated for their use in medical dual-task training settings. In expert reviews with six participants in human-computer interaction and medical didactics, advantages, disadvantages, and refinements for the cues were developed. For visual cues, an overt but less occluding cue was preferred for constant visibility of the primary task. A more diegetic cue design was proposed for the auditory cues to use a patient simulation as a reminder of the regular face glance. In general, several cues were found to be suitable for gaze guidance training, requiring only minor changes for improvement.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Heinrich, F; Schott, D; Schwenderling, L; Hansen, C
Do You See What I See? Evaluating Relative Depth Judgments Between Real and Virtual Projections Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, Association for Computing Machinery, New York, NY, USA, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{heinrich_you_2025,
title = {Do You See What I See? Evaluating Relative Depth Judgments Between Real and Virtual Projections},
author = {F Heinrich and D Schott and L Schwenderling and C Hansen},
url = {https://doi.org/10.1145/3706599.3720157},
doi = {10.1145/3706599.3720157},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '25},
abstract = {Projector-based augmented reality (AR) is promising in different domains with less issues in discomfort or shortage of space. However, due to limitations like high costs and cumbersome calibration, this AR modality remains underused. To address this problem, a stereoscopic projector-based AR simulation was implemented for a cost-effective video see-through AR headset. To evaluate the validity of this simulation, a relative depth judgment experiment was conducted to compare this method with a physical projection system. Consistent results suggest that a known interaction effect between visualization and disparity mode could be successfully reproduced using both the physical projection and the virtual simulation. In addition, first findings indicate that there are no significant differences between these projection modalities. The results indicate that other perception-related effects observed for projector-based AR may also be applicable to virtual projection simulations and that future findings determined using only these simulations may also be applicable to real projections.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Herbrich, W; Joeres, F; Hansen, C
A Novel Framework for Hand Visualization in Web-Based Collaborative XR Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 18–23, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schwenderling_novel_2024,
title = {A Novel Framework for Hand Visualization in Web-Based Collaborative XR},
author = {L Schwenderling and W Herbrich and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536317/},
doi = {10.1109/VRW62533.2024.00010},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {18–23},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Many extended reality (XR) applications are platform-specific, making accessibility and cross-platform collaboration difficult. Web-based collaborative XR can enhance adoption of XR technologies, using the browser as a platform-independent interface. However, challenges arise from the browser environment, such as performance limitations. To this end, we present a WebXR-based framework for hand interaction in cross-platform collaboration in XR. A network structure and methods for collaborative and individual object manipulation complement the integrated hand tracking. Three different fidelity levels to represent the hands of remote users were implemented to accommodate different performance capabilities. Concepts ranged from virtual hands to discrete poses with abstract objects. A sample application was implemented with a puzzle task. Two users collaborated in the browsers of the Microsoft HoloLens 2 and the Meta Quest 2. Qualitative and quantitative data on user performance (n=9), and frame rate recordings (n=1) were collected. All users were able to solve the puzzle together quickly and intuitively. The Quest environment was preferred, as there were more performance issues with the HoloLens. Hand interaction was well-received and proved to be sufficient as the only form of communication. Simpler representations of the hands lead to a higher frame rate, whereby the effects were device-dependent. The impact on task performance was low. Hand interaction enables an intuitive exchange of objects and basic communication in cross-platform collaboration via browsers. Depending on the XR environment, however, device-specific performance limitations must be taken into account by modulating the amount of data and rendering effort.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schott, D; Kunz, M; Mandel, J; Schwenderling, L; Braun-Dullaeus, R; Hansen, C
An AR-Based Multi-User Learning Environment for Anatomy Seminars Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 949–950, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schott_ar-based_2024,
title = {An AR-Based Multi-User Learning Environment for Anatomy Seminars},
author = {D Schott and M Kunz and J Mandel and L Schwenderling and R Braun-Dullaeus and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536568/},
doi = {10.1109/VRW62533.2024.00271},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {949–950},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Understanding the intricate and rapid changes in shape during embryonic formation is vital for medical students. Using the example of embryonic human heart development, we introduce an AR-based multi-user approach to enhance understanding and foster a participatory learning environment. Through a user-centered approach, we created a prototype accommodating two player roles and enabling multi-modal inputs to encourage dynamic group discussions. We invited four anatomy experts to evaluate three system configurations in an interdisciplinary workshop to assess the feasibility of integration into anatomy seminars. The gathered data and feedback indicate the potential of our collaborative concept for integration into the medical curriculum.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Wagnerberger, D; Schott, D; Schwenderling, L; Hansen, C; Schumacher, D
Empowering Patients: Improve Gender-Sensitive Medical Knowledge Through Interactive Edutainment Proceedings Article
In: Proceedings of the 13th Nordic Conference on Human-Computer Interaction, pp. 1–12, Association for Computing Machinery, New York, NY, USA, 2024, ISBN: 979-8-4007-0966-1.
@inproceedings{wagnerberger_empowering_2024,
title = {Empowering Patients: Improve Gender-Sensitive Medical Knowledge Through Interactive Edutainment},
author = {D Wagnerberger and D Schott and L Schwenderling and C Hansen and D Schumacher},
url = {https://dl.acm.org/doi/10.1145/3679318.3685500},
doi = {10.1145/3679318.3685500},
isbn = {979-8-4007-0966-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {Proceedings of the 13th Nordic Conference on Human-Computer Interaction},
pages = {1–12},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {NordiCHI '24},
abstract = {Disregarding crucial gender-specific differences and potential risks in medicine leads to widespread gender inequalities. This paper introduces interactive edutainment concepts developed through a user-centered design approach to raise awareness of gender medicine. An interactive exhibition course and an accompanying deck of cards provide an engaging and sensitizing experience of medical gender inequalities. Qualitative feedback, self-assessment, and user experience and behavior were evaluated during a public display of the concepts (n=14). The results highlight the potential of our playful approach to raising awareness among the public as well as health-related professionals, paving new ways for communication and empowerment of patients of all genders. We believe these insights have broader applicability across various domains, supporting efforts to address all forms of inequality.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Kleinau, A; Herbrich, W; Kasireddy, H; Heinrich, F; Hansen, C
Activation modes for gesture-based interaction with a magic lens in AR anatomy visualisation Journal Article
In: Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization, vol. 11, no. 4, pp. 1243–1250, 2023, ISSN: 2168-1163, (Publisher: Taylor & Francis).
@article{schwenderling_activation_2023,
title = {Activation modes for gesture-based interaction with a magic lens in AR anatomy visualisation},
author = {L Schwenderling and A Kleinau and W Herbrich and H Kasireddy and F Heinrich and C Hansen},
url = {https://doi.org/10.1080/21681163.2022.2157749},
doi = {10.1080/21681163.2022.2157749},
issn = {2168-1163},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
journal = {Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization},
volume = {11},
number = {4},
pages = {1243–1250},
abstract = {Learning human anatomy is key for health-related education and often requires expensive and time-consuming cadaver dissection courses. Augmented reality (AR) for the representation of spatially registered 3D models can be used as a low-cost and flexible alternative. However, suitable visualisation and interaction approaches are needed to display multilayered anatomy data. This paper features a spherical volumetric AR Magic Lens controlled by mid-air hand gestures to explore the human anatomy on a phantom. Defining how gestures control associated actions is important for intuitive interaction. Therefore, two gesture activation modes were investigated in a user study (n = 24). Performing the gestures once to toggle actions showed a higher interaction count since an additional stop gesture was used. Holding the gestures was favoured in the qualitative feedback. Both modes showed similar performance in terms of accuracy and task completion time. Overall, direct gesture manipulation of a magic lens for anatomy visualisation is, thus, recommended.},
note = {Publisher: Taylor & Francis},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schwenderling, L; Heinrich, F; Hansen, C
Augmented reality visualization of automated path planning for percutaneous interventions: a phantom study Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 17, no. 11, pp. 2071–2079, 2022, ISSN: 1861-6429.
@article{schwenderling_augmented_2022,
title = {Augmented reality visualization of automated path planning for percutaneous interventions: a phantom study},
author = {L Schwenderling and F Heinrich and C Hansen},
url = {https://doi.org/10.1007/s11548-022-02690-4},
doi = {10.1007/s11548-022-02690-4},
issn = {1861-6429},
year = {2022},
date = {2022-11-01},
urldate = {2022-11-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {17},
number = {11},
pages = {2071–2079},
abstract = {Insertion point identification is a major challenge for percutaneous interventions. Planning in 2D slice image data is time-consuming and inefficient. Automated path planning can help to overcome these challenges. However, the setup of the intervention room is difficult to consider. In addition, transferring the insertion point to the skin is often prone to error. Therefore, a visualization for an automated path planning was implemented.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schreiter, J; Schott, D; Schwenderling, L; Hansen, C; Heinrich, F; Joeres, F
AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future Journal Article
In: Journal of Imaging, vol. 8, no. 10, pp. 255, 2022, ISSN: 2313-433X, (Publisher: Multidisciplinary Digital Publishing Institute).
@article{schreiter_ar-supported_2022,
title = {AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future},
author = {J Schreiter and D Schott and L Schwenderling and C Hansen and F Heinrich and F Joeres},
url = {https://www.mdpi.com/2313-433X/8/10/255},
doi = {10.3390/jimaging8100255},
issn = {2313-433X},
year = {2022},
date = {2022-10-01},
urldate = {2022-10-01},
journal = {Journal of Imaging},
volume = {8},
number = {10},
pages = {255},
abstract = {Robotic assistance is applied in orthopedic interventions for pedicle screw placement (PSP). While current robots do not act autonomously, they are expected to have higher autonomy under surgeon supervision in the mid-term. Augmented reality (AR) is promising to support this supervision and to enable human–robot interaction (HRI). To outline a futuristic scenario for robotic PSP, the current workflow was analyzed through literature review and expert discussion. Based on this, a hypothetical workflow of the intervention was developed, which additionally contains the analysis of the necessary information exchange between human and robot. A video see-through AR prototype was designed and implemented. A robotic arm with an orthopedic drill mock-up simulated the robotic assistance. The AR prototype included a user interface to enable HRI. The interface provides data to facilitate understanding of the robot’s “intentions”, e.g., patient-specific CT images, the current workflow phase, or the next planned robot motion. Two-dimensional and three-dimensional visualization illustrated patient-specific medical data and the drilling process. The findings of this work contribute a valuable approach in terms of addressing future clinical needs and highlighting the importance of AR support for HRI.},
note = {Publisher: Multidisciplinary Digital Publishing Institute},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Heinrich, F; Schwenderling, L; Joeres, F; Hansen, C
2D versus 3D: A Comparison of Needle Navigation Concepts between Augmented Reality Display Devices Proceedings Article
In: 2022 IEEE Conference on Virtual Reality and 3D User Interfaces (VR), pp. 260–269, 2022, (ISSN: 2642-5254).
@inproceedings{heinrich_2d_2022,
title = {2D versus 3D: A Comparison of Needle Navigation Concepts between Augmented Reality Display Devices},
author = {F Heinrich and L Schwenderling and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/document/9756753},
doi = {10.1109/VR51125.2022.00045},
year = {2022},
date = {2022-03-01},
urldate = {2022-03-01},
booktitle = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)},
pages = {260–269},
abstract = {Surgical procedures requiring needle navigation assistance suffer from complicated hand-eye coordination and are mentally demanding. Augmented reality (AR) can help overcome these issues. However, only an insufficient amount of fundamental research has focused on the design and hardware selection of such AR needle navigation systems. This work contributes to this research area by presenting a user study (n=24) comparing three state-of-the-art navigation concepts displayed by an optical see-through head-mounted display and a stereoscopic projection system. A two-dimensional glyph visualization resulted in higher targeting accuracy but required more needle insertion time. In contrast, punctures guided by a three-dimensional see-through vision concept were less accurate but faster and were favored in a qualitative interview. The third concept, a static representation of the correctly positioned needle, showed too high target errors for clinical accuracy needs. This concept performed worse when displayed by the projection system. Besides that, no meaningful differences between the evaluated AR display devices were detected. User preferences and use case restrictions, e.g., sterility requirements, seem to be more crucial selection criteria. Future work should focus on improving the accuracy of the see-through vision concept. Until then, the glyph visualization is recommended.},
note = {ISSN: 2642-5254},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Heinrich, F; Schwenderling, L; Streuber, M; Bornemann, K; Lawonn, K; Hansen, C
Effects of Surface Visualizations on Depth Perception in Projective Augmented Reality Proceedings Article
In: 2021 IEEE 2nd International Conference on Human-Machine Systems (ICHMS), pp. 1–6, 2021.
@inproceedings{heinrich_effects_2021,
title = {Effects of Surface Visualizations on Depth Perception in Projective Augmented Reality},
author = {F Heinrich and L Schwenderling and M Streuber and K Bornemann and K Lawonn and C Hansen},
url = {https://ieeexplore.ieee.org/document/9582452},
doi = {10.1109/ICHMS53169.2021.9582452},
year = {2021},
date = {2021-09-01},
urldate = {2021-09-01},
booktitle = {2021 IEEE 2nd International Conference on Human-Machine Systems (ICHMS)},
pages = {1–6},
abstract = {Depth perception is a common issue in augmented reality (AR). Projective AR, where the spatial relations between the projection surface and displayed virtual contents need to be represented properly, is particularly affected. This is crucial in the medical domain, e.g., for the distances between the patient’s skin and projected inner anatomical structures, but not much research was conducted in this context before. To this end, this work investigates the applicability of surface visualization techniques to support the perception of spatial relations in projective AR. Four methods previously explored in different domains were combined with the projection of inner anatomical structures on a human torso phantom. They were evaluated in a comparative user study (n=21) with respect to a distance estimation and a sorting task. Measures included Task completion time, accuracy, total Head movement and Confidence of the participants. Consistent results across variables show advantages of more occluding surface visualizations for the distance estimation task. Opposite results were obtained for the sorting task. This suggests that the amount of needed surface preservation depends on the use case and individual occlusion compromises need to be explored in future work.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Hansen, C; Heinrich, F
AR visualization of automated access path planning for percutaneous interventions Journal Article
In: Current Directions in Biomedical Engineering, vol. 7, no. 1, pp. 48–52, 2021, ISSN: 2364-5504, (Publisher: De Gruyter).
@article{schwenderling_ar_2021,
title = {AR visualization of automated access path planning for percutaneous interventions},
author = {L Schwenderling and C Hansen and F Heinrich},
url = {https://www.degruyterbrill.com/document/doi/10.1515/cdbme-2021-1011/html},
doi = {10.1515/cdbme-2021-1011},
issn = {2364-5504},
year = {2021},
date = {2021-08-01},
urldate = {2021-08-01},
journal = {Current Directions in Biomedical Engineering},
volume = {7},
number = {1},
pages = {48–52},
abstract = {Minimally invasive interventions, e.g., percutaneous needle interventions, have many advantages compared to traditional surgery. However, they may require complex and time-consuming planning with experience-dependent success. Automated access path planning is faster and more consistent but individual preferences and situational circumstances are not considered. To this end, displaying the path planning results directly on the patient’s skin, using projector-based augmented reality (AR), was investigated. A constraint-based path planning was implemented to evaluate the quality of every path, taking into account risk structures and path length. A visualization was developed to display the results on the skin and to allow for path selection. The choice of the path followed by a navigated insertion was evaluated in a pilot study (n=5), considering four levels of the visualization with different amounts of displayed insertion points. Participants stated that they preferred to have multiple potential puncture points displayed. However, the results for the considered variables show only small differences. Overall, it has been shown that projector-based AR visualization of automated access path planning is possible and enables individual, situation-adapted insertion point selection. More research is required to further explore optimal display of paths.},
note = {Publisher: De Gruyter},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schwenderling, L
Intraoperative Augmented Reality Visualisierung einer automatisierten Pfadplanung für perkutane Interventionen Masters Thesis
2021.
@mastersthesis{schwenderling_intraoperative_2021,
title = {Intraoperative Augmented Reality Visualisierung einer automatisierten Pfadplanung für perkutane Interventionen},
author = {L Schwenderling},
url = {https://www.var.ovgu.de/pub/2021_MA_Schwenderling.pdf},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
abstract = {Percutaneous interventions, owing to the small trauma involved, often result in less pain and faster recovery than open procedures. However, altered visual and haptic feedback makes them difficult to perform. Access planning can reduce the risk of complications, but it can be time-consuming and depends on the experience of the person performing it. Automated path planning enables fast and consistent determination of access paths. To allow path selection in the intervention room that takes the individual situation into account, a display of the path planning results using projector-based augmented reality was investigated. Based on a constraint-based path planning, possible insertion points were displayed with a color coding directly on the skin surface. An additional display of the target position was intended to further support the path decision. Two studies examined various aspects of displaying the insertion points and the target. They showed that displaying all possible insertion points together with a quality rating supports the selection of safe paths. Among the color codings, a sequential green scale with discrete gradations for displaying all possible insertion points was rated best. Combining it with a projected target visualization in addition to a medical slice image display enabled the evaluation of the resulting insertion for each surface point. Overall, it was shown that displaying an automated path planning directly on the skin surface can facilitate path selection, especially for inexperienced users. For this, displaying all insertion paths with a rating, combined with a target visualization, is preferable.},
keywords = {},
pubstate = {published},
tppubtype = {mastersthesis}
}

Heinrich, F; Schwenderling, L; Joeres, F; Lawonn, K; Hansen, C
Comparison of Augmented Reality Display Techniques to Support Medical Needle Insertion Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 26, no. 12, pp. 3568–3575, 2020, ISSN: 1941-0506.
@article{heinrich_comparison_2020,
title = {Comparison of Augmented Reality Display Techniques to Support Medical Needle Insertion},
author = {F Heinrich and L Schwenderling and F Joeres and K Lawonn and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/9211732},
doi = {10.1109/TVCG.2020.3023637},
issn = {1941-0506},
year = {2020},
date = {2020-12-01},
urldate = {2020-12-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {26},
number = {12},
pages = {3568–3575},
abstract = {Augmented reality (AR) may be a useful technique to overcome issues of conventionally used navigation systems supporting medical needle insertions, like increased mental workload and complicated hand-eye coordination. Previous research primarily focused on the development of AR navigation systems designed for specific displaying devices, but differences between employed methods have not been investigated before. To this end, a user study involving a needle insertion task was conducted comparing different AR display techniques with a monitor-based approach as baseline condition for the visualization of navigation information. A video see-through stationary display, an optical see-through head-mounted display and a spatial AR projector-camera-system were investigated in this comparison. Results suggest advantages of using projected navigation information in terms of lower task completion time, lower angular deviation and affirmative subjective participant feedback. Techniques requiring the intermediate view on screens, i.e. the stationary display and the baseline condition, showed less favorable results. Thus, benefits of providing AR navigation information compared to a conventionally used method could be identified. Significant objective measures results, as well as an identification of advantages and disadvantages of individual display techniques contribute to the development and design of improved needle navigation systems.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Heinrich, F; Schwenderling, L; Becker, M; Skalej, M; Hansen, C
HoloInjection: augmented reality support for CT-guided spinal needle injections Journal Article
In: Healthcare Technology Letters, vol. 6, no. 6, pp. 165–171, 2019, (Publisher: The Institution of Engineering and Technology).
@article{heinrich_holoinjection_2019,
title = {HoloInjection: augmented reality support for CT-guided spinal needle injections},
author = {F Heinrich and L Schwenderling and M Becker and M Skalej and C Hansen},
url = {https://digital-library.theiet.org/doi/10.1049/htl.2019.0062},
doi = {10.1049/htl.2019.0062},
year = {2019},
date = {2019-11-01},
urldate = {2019-11-01},
journal = {Healthcare Technology Letters},
volume = {6},
number = {6},
pages = {165–171},
abstract = {The correct placement of needles is decisive for the success of many minimally-invasive interventions and therapies. These needle insertions are usually only guided by radiological imaging and can benefit from additional navigation support. Augmented reality (AR) is a promising tool to conveniently provide needed information and may thus overcome the limitations of existing approaches. To this end, a prototypical AR application was developed to guide the insertion of needles to spinal targets using the mixed reality glasses Microsoft HoloLens. The system's registration accuracy was attempted to measure and three guidance visualisation concepts were evaluated concerning achievable in-plane and out-of-plane needle orientation errors in a comparison study. Results suggested high registration accuracy and showed that the AR prototype is suitable for reducing out-of-plane orientation errors. Limitations, like comparatively high in-plane orientation errors, effects of the viewing position and missing image slices indicate potential for improvement that needs to be addressed before transferring the application to clinical trials.},
note = {Publisher: The Institution of Engineering and Technology},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Schwenderling, L
Augmented-Reality-Unterstuetzung bei CT-gesteuerter periradikulaerer Therapie mit Hilfe der Microsoft HoloLens Bachelor Thesis
2019.
@bachelorthesis{schwenderling_l_augmented-reality-unterstuetzung_2019,
title = {Augmented-Reality-Unterstuetzung bei CT-gesteuerter periradikulaerer Therapie mit Hilfe der Microsoft HoloLens},
author = {L Schwenderling},
url = {https://www.var.ovgu.de/pub/2019_BA_Schwenderling_geschwaerzt.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
abstract = {Periradicular therapy is a treatment option for chronic and acute back pain caused by irritation of the spinal nerve roots. A needle is positioned at the nerve roots and medication is applied locally. Correct needle placement is decisive for the success of the therapy, and needle navigation can support it. In this thesis, a prototypical application that provides augmented reality needle navigation for the Microsoft HoloLens is implemented and evaluated. Three different visualization concepts are considered: the plane, the line, and the ConeRing concept. Six different factors were considered for the evaluation of the application. The in-plane error (IPE) and the total angular error showed considerably larger deviations than the out-of-plane error (OPE). The plane concept performed worst (total angular error: 6.752 ± 0.80°, IPE: 6.01 ± 0.79°, OPE: 1.58 ± 0.21°), followed by the line concept (total angular error: 4.45 ± 0.75°, IPE: 4.45 ± 0.75°, OPE: 1.52 ± 0.23°), and the ConeRing concept performed best (total angular error: 4.12 ± 0.38°, IPE: 3.51 ± 0.39°, OPE: 1.53 ± 0.22°). A second evaluation demonstrated that the registration enables higher accuracy. Significant differences were found regarding duration and subjective confidence and difficulty: the plane concept was rated considerably worse than the line and ConeRing concepts.},
keywords = {},
pubstate = {published},
tppubtype = {bachelorthesis}
}