Josefine Schreiter, M.Sc.
Short Bio
I obtained my Bachelor's degree in Medical Engineering (B.Eng.) at the University of Applied Sciences Jena, followed by a Master's degree in Medical Systems Engineering (M.Sc.) at Otto-von-Guericke University Magdeburg, where I specialized in Medical Imaging and Theoretical Neurosciences. Since 2020, I have been pursuing my PhD in the Virtual and Augmented Reality Group at Otto-von-Guericke University Magdeburg and the Research Campus STIMULATE. I have gained international experience through work stays in China, South Africa, Nicaragua, and Australia.
Research Interests
My research focuses on human-robot interaction in medical contexts and on integrating augmented and mixed reality technologies to support robotic interventional and surgical workflows. I explore how intuitive interaction concepts can enhance the collaboration between clinicians and robotic systems.
- Multimodal interfaces for controlling (medical) robots
- Comparative evaluation of interaction modalities for human–robot collaboration
- AR/XR-based interfaces for robotic procedures
Contact:
LinkedIn | ORCID | ResearchGate
Publications
2025

Schreiter, J; Heinrich, F; Hatscher, B; Schott, D; Hansen, C
Multimodal human–computer interaction in interventional radiology and surgery: a systematic literature review Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 20, no. 4, pp. 807–816, 2025, ISSN: 1861-6429.
@article{schreiter_multimodal_2025,
  title     = {Multimodal human–computer interaction in interventional radiology and surgery: a systematic literature review},
  author    = {Schreiter, J. and Heinrich, F. and Hatscher, B. and Schott, D. and Hansen, C.},
  doi       = {10.1007/s11548-024-03263-3},
  issn      = {1861-6429},
  year      = {2025},
  date      = {2025-04-01},
  urldate   = {2025-04-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  volume    = {20},
  number    = {4},
  pages     = {807--816},
  abstract  = {As technology advances, more research dedicated to medical interactive systems emphasizes the integration of touchless and multimodal interaction (MMI). Particularly in surgical and interventional settings, this approach is advantageous because it maintains sterility and promotes a natural interaction. Past reviews have focused on investigating MMI in terms of technology and interaction with robots. However, none has put particular emphasis on analyzing these kind of interactions for surgical and interventional scenarios.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}

Schreiter, J; Mielke, T; Georgiades, M; Pech, M; Hansen, C; Heinrich, F
Exploring Interaction Concepts for the Manipulation of a Collaborative Robot: A Comparative Study Proceedings Article
In: Proceedings of the 2025 ACM/IEEE International Conference on Human-Robot Interaction, pp. 55–64, IEEE Press, Melbourne, Australia, 2025.
@inproceedings{schreiter_exploring_2025,
  title     = {Exploring Interaction Concepts for the Manipulation of a Collaborative Robot: A Comparative Study},
  author    = {Schreiter, J. and Mielke, T. and Georgiades, M. and Pech, M. and Hansen, C. and Heinrich, F.},
  year      = {2025},
  date      = {2025-01-01},
  urldate   = {2025-01-01},
  booktitle = {Proceedings of the 2025 {ACM/IEEE} International Conference on Human-Robot Interaction},
  pages     = {55--64},
  publisher = {IEEE Press},
  address   = {Melbourne, Australia},
  series    = {HRI '25},
  abstract  = {Robotic systems have the potential to enhance a wide range of domains, such as medical workflows, by automating individual steps of complex processes. However, human-robot interaction (HRI) is of critical importance, as effective collaboration between humans and robots is essential even in highly automated environments. Recent research has predominantly focused on the development of interaction methods rather than systematically comparing existing approaches. Therefore, we conducted a user study (n=20) to compare different HRI concepts for end effector manipulation combined with clutching mechanisms for manipulation activation in an alignment task using the example of robotic ultrasound (US). Manipulation methods included hand-guiding, teleoperation, and touchless interaction, while clutching mechanisms were realized through hand, voice, and foot interaction. The results indicate advantages of hand-guiding for manipulation. While no significant differences were observed between clutching mechanisms, strong evidence suggests comparable performance across these modalities. Notably, significant interaction effects on perceived workload reveal that the optimal clutching mechanism depends on the selected manipulation technique. This work underscores the critical importance of selecting appropriate HRI concepts and understanding the dependencies of manipulation techniques with clutching mechanisms. While our study included the usage of a robotic US, the insights gained are broadly transferable across various domains involving robotic manipulation tasks in human-robot collaboration.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2022

Schreiter, J; Mielke, T; Schott, D; Thormann, M; Omari, J; Pech, M; Hansen, C
A multimodal user interface for touchless control of robotic ultrasound Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 18, no. 8, pp. 1429–1436, 2022, ISSN: 1861-6429.
@article{schreiter_multimodal_2022,
  title     = {A multimodal user interface for touchless control of robotic ultrasound},
  author    = {Schreiter, J. and Mielke, T. and Schott, D. and Thormann, M. and Omari, J. and Pech, M. and Hansen, C.},
  url       = {https://link.springer.com/10.1007/s11548-022-02810-0},
  doi       = {10.1007/s11548-022-02810-0},
  issn      = {1861-6429},
  year      = {2022},
  date      = {2022-12-01},
  urldate   = {2022-12-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  volume    = {18},
  number    = {8},
  pages     = {1429--1436},
  abstract  = {Purpose Past research contained the investigation and development of robotic ultrasound. In this context, interfaces which allow for interaction with the robotic system are of paramount importance. Few researchers have addressed the issue of developing non-tactile interaction approaches, although they could be beneficial for maintaining sterility during medical procedures. Interaction could be supported by multimodality, which has the potential to enable intuitive and natural interaction. To assess the feasibility of multimodal interaction for non-tactile control of a co-located robotic ultrasound system, a novel human–robot interaction concept was developed.
Methods The medical use case of needle-based interventions under hybrid computed tomography and ultrasound imaging was analyzed by interviewing four radiologists. From the resulting workflow, interaction tasks were derived which include human–robot interaction. Based on this, characteristics of a multimodal, touchless human–robot interface were elaborated, suitable interaction modalities were identified, and a corresponding interface was developed, which was thereafter evaluated in a user study with eight participants.
Results The implemented interface includes voice commands, combined with hand gesture control for discrete control and navigation interaction of the robotic US probe, respectively. The interaction concept was evaluated by the users in the form of a quantitative questionnaire with an average usability. Qualitative analysis of interview results revealed user satisfaction with the implemented interaction methods and potential improvements to the system.
Conclusion A multimodal, touchless interaction concept for a robotic US for the use case of needle-based procedures in interventional radiology was developed, incorporating combined voice and hand gesture control. Future steps will include the integration of a solution for the missing haptic feedback and the evaluation of its clinical suitability.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Methods The medical use case of needle-based interventions under hybrid computed tomography and ultrasound imaging was analyzed by interviewing four radiologists. From the resulting workflow, interaction tasks were derived which include human–robot interaction. Based on this, characteristics of a multimodal, touchless human–robot interface were elaborated, suitable interaction modalities were identified, and a corresponding interface was developed, which was thereafter evaluated in a user study with eight participants.
Results The implemented interface includes voice commands, combined with hand gesture control for discrete control and navigation interaction of the robotic US probe, respectively. The interaction concept was evaluated by the users in the form of a quantitative questionnaire with an average usability. Qualitative analysis of interview results revealed user satisfaction with the implemented interaction methods and potential improvements to the system.
Conclusion A multimodal, touchless interaction concept for a robotic US for the use case of needle-based procedures in interventional radiology was developed, incorporating combined voice and hand gesture control. Future steps will include the integration of a solution for the missing haptic feedback and the evaluation of its clinical suitability.

Schreiter, J; Schott, D; Schwenderling, L; Hansen, C; Heinrich, F; Joeres, F
AR-Supported Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future Journal Article
In: Journal of Imaging, vol. 8, no. 10, pp. 255, 2022, ISSN: 2313-433X, (Publisher: Multidisciplinary Digital Publishing Institute).
@article{schreiter_ar-supported_2022,
  title     = {{AR-Supported} Supervision of Conditional Autonomous Robots: Considerations for Pedicle Screw Placement in the Future},
  author    = {Schreiter, J. and Schott, D. and Schwenderling, L. and Hansen, C. and Heinrich, F. and Joeres, F.},
  url       = {https://www.mdpi.com/2313-433X/8/10/255},
  doi       = {10.3390/jimaging8100255},
  issn      = {2313-433X},
  year      = {2022},
  date      = {2022-10-01},
  urldate   = {2022-10-01},
  journal   = {Journal of Imaging},
  volume    = {8},
  number    = {10},
  pages     = {255},
  abstract  = {Robotic assistance is applied in orthopedic interventions for pedicle screw placement (PSP). While current robots do not act autonomously, they are expected to have higher autonomy under surgeon supervision in the mid-term. Augmented reality (AR) is promising to support this supervision and to enable human–robot interaction (HRI). To outline a futuristic scenario for robotic PSP, the current workflow was analyzed through literature review and expert discussion. Based on this, a hypothetical workflow of the intervention was developed, which additionally contains the analysis of the necessary information exchange between human and robot. A video see-through AR prototype was designed and implemented. A robotic arm with an orthopedic drill mock-up simulated the robotic assistance. The AR prototype included a user interface to enable HRI. The interface provides data to facilitate understanding of the robot’s ”intentions”, e.g., patient-specific CT images, the current workflow phase, or the next planned robot motion. Two-dimensional and three-dimensional visualization illustrated patient-specific medical data and the drilling process. The findings of this work contribute a valuable approach in terms of addressing future clinical needs and highlighting the importance of AR support for HRI.},
  note      = {Publisher: Multidisciplinary Digital Publishing Institute},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}

Schreiter, J; Semshchikov, V; Hanses, M; Elkmann, N; Hansen, C
Towards a real-time control of robotic ultrasound using haptic force feedback Journal Article
In: Current Directions in Biomedical Engineering, vol. 8, no. 1, pp. 81–84, 2022, ISSN: 2364-5504, (Publisher: De Gruyter).
@article{schreiter_towards_2022,
  title     = {Towards a real-time control of robotic ultrasound using haptic force feedback},
  author    = {Schreiter, J. and Semshchikov, V. and Hanses, M. and Elkmann, N. and Hansen, C.},
  url       = {https://www.degruyterbrill.com/document/doi/10.1515/cdbme-2022-0021/html},
  doi       = {10.1515/cdbme-2022-0021},
  issn      = {2364-5504},
  year      = {2022},
  date      = {2022-07-01},
  urldate   = {2022-07-01},
  journal   = {Current Directions in Biomedical Engineering},
  volume    = {8},
  number    = {1},
  pages     = {81--84},
  abstract  = {Ultrasound is a widely used imaging technique and is appreciated for its non-invasiveness, absence of radiation, widespread availability, and compact equipment. Ergonomic difficulties in manual handling of the probe could be enhanced by a robotic controlled ultrasound. The paper addresses the development of such a system which enables remote operation of a ultrasound probe and includes haptic force feedback as well as video conferencing components for visual feedback. The development process followed a user-centered approach by investigating needs of potential end-users. Preliminary results demonstrated the functionality of the developed system for generating medical image data under laboratory conditions.},
  note      = {Publisher: De Gruyter},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
2021

Schreiter, J; Joeres, F; March, C; Pech, M; Hansen, C
Application Potential of Robot-Guided Ultrasound During CT-Guided Interventions Proceedings Article
In: Noble, J; Aylward, S; Grimwood, A; Min, Z; Lee, S; Hu, Y (Ed.): Simplifying Medical Ultrasound, pp. 116–125, Springer International Publishing, Cham, 2021, ISBN: 978-3-030-87583-1.
@inproceedings{schreiter_application_2021,
  title     = {Application Potential of Robot-Guided Ultrasound During {CT-Guided} Interventions},
  author    = {Schreiter, J. and Joeres, F. and March, C. and Pech, M. and Hansen, C.},
  editor    = {Noble, J. and Aylward, S. and Grimwood, A. and Min, Z. and Lee, S. and Hu, Y.},
  doi       = {10.1007/978-3-030-87583-1_12},
  isbn      = {978-3-030-87583-1},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {Simplifying Medical Ultrasound},
  pages     = {116--125},
  publisher = {Springer International Publishing},
  address   = {Cham},
  abstract  = {CT-guided interventions are common practices in interventional radiology to treat oncological conditions. During these interventions, radiologists are exposed to radiation and faced with a non-ergonomic working environment. A robot-guided ultrasound (US) as a complementing imaging method for the purpose of needle guidance could help to overcome these challenges. A survey with 21 radiologists was made to analyze the application potential of US during CT-guided interventions with regard to anatomical regions to be scanned as locations of target lesions as well as specific situations during which US could complement CT imaging. The results indicate that the majority of respondents already applied US during CT-guided interventions for reasons of real-time imaging of the target lesion, organ, and needle movement as well as for lesions that are difficult to visualize in CT. Potential situations of US application were identified as out-of-plane needle insertion and puncturing lesions within the liver and subcutaneous lymph nodes. Interaction with a robot-guided US should be intuitive and include an improved sterility concept.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}