Position: PhD Candidate
Phone: +49 391 67-59349
E-Mail: tonia.mielke@ovgu.de
Office: Currently at University of Toronto, Canada
I am a research scientist in the Virtual and Augmented Reality Group. I completed my bachelor’s degree in Medical Engineering and my master’s degree in Computer Science at Otto von Guericke University Magdeburg. I am now working toward my dissertation, focusing on the development of intuitive human–robot interaction and the use of extended reality to support such interaction.
Currently, I am a visiting student at the Continuum Robotics Lab at the University of Toronto, Canada, where I explore interaction concepts and feedback approaches for continuum robots.
Find me also here: Google Scholar | ORCID | ResearchGate
My research focuses on intuitive Human–Robot Interaction (HRI), with an emphasis on gesture control and the use of extended reality (XR) to support these interactions.

Mielke, T; Allgaier, M; Hansen, C; Heinrich, F
Extended Reality Check: Evaluating XR Prototyping for Human-Robot Interaction in Contact-Intensive Tasks Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 31, no. 11, pp. 10035–10044, 2025.
@article{mielke_extended_2025,
title = {Extended Reality Check: Evaluating XR Prototyping for Human-Robot Interaction in Contact-Intensive Tasks},
author = {T Mielke and M Allgaier and C Hansen and F Heinrich},
doi = {10.1109/TVCG.2025.3616753},
year = {2025},
date = {2025-10-02},
urldate = {2025-10-02},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {31},
issue = {11},
pages = {10035–10044},
abstract = {Extended Reality (XR) has the potential to improve efficiency and safety in the user-centered development of human-robot interaction. However, the validity of using XR prototyping for user studies for contact-intensive robotic tasks remains underexplored. These in-contact tasks are particularly relevant due to challenges arising from indirect force perception in robot control. Therefore, in this work, we investigate a representative example of such a task: robotic ultrasound. A user study was conducted to assess the transferability of results from a simulated user study to real-world conditions, comparing two force-assistance approaches. The XR simulation replicates the physical study set-up employing a virtual robotic arm, its control interface, ultrasound imaging, and two force-assistance methods: automation and force visualization. Our results indicate that while differences in force deviation, perceived workload, and trust emerge between real and simulated setups, the overall findings remain consistent. Specifically, partial automation of robot control improves performance and trust while reducing workload, and visual feedback decreases force deviation in both real and simulated conditions. These findings highlight the potential of XR for comparative studies, even in complex robotic tasks.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T; Heinrich, F; Hansen, C
Gesturing Towards Efficient Robot Control: Exploring Sensor Placement and Control Modes for Mid-Air Human-Robot Interaction Proceedings Article
In: 2025 IEEE International Conference on Robotics and Automation (ICRA), 2025.
@inproceedings{mielke_gesturing_2025,
title = {Gesturing Towards Efficient Robot Control: Exploring Sensor Placement and Control Modes for Mid-Air Human-Robot Interaction},
author = {T Mielke and F Heinrich and C Hansen},
doi = {10.1109/ICRA55743.2025.11127519},
year = {2025},
date = {2025-09-02},
urldate = {2025-01-01},
booktitle = {2025 IEEE International Conference on Robotics and Automation (ICRA)},
abstract = {While collaborative robots effectively combine robotic precision with human capabilities, traditional control methods such as button presses or hand guidance can be slow and physically demanding. This has led to an increasing interest in natural user interfaces that integrate hand gesture-based interactions for more intuitive and flexible robot control. Therefore, this paper systematically explores mid-air robot control by comparing position and rate control modes with different state-of-the-art and novel sensor placements. A user study was conducted to evaluate each combination in terms of accuracy, task duration, perceived workload, and physical exertion. Our results indicate that position control is more efficient than rate control. Traditional desk-mounted sensors can provide a good balance between accuracy and comfort. However, robot-mounted sensors are a viable alternative for short-term, accurate control with less spatial requirements. Leg-mounted sensors, while comfortable, pose challenges to hand-eye coordination. Based on these findings, we provide design implications for improving the usability and comfort of mid-air human-robot interaction. Future research should extend this evaluation to a wider range of tasks and environments.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
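
The two control modes compared in this paper map hand input to robot motion in fundamentally different ways: position control couples the end-effector target directly to the hand pose, while rate control integrates a velocity proportional to the hand's displacement from a neutral pose. A minimal sketch of the distinction (illustrative only, not the study's implementation; all names and parameter values are placeholders):

import numpy as np

def position_control(hand_pos, hand_origin, ee_origin, scale=1.0):
    """Position control: end-effector displacement mirrors hand displacement."""
    return ee_origin + scale * (hand_pos - hand_origin)

def rate_control(hand_pos, neutral_pos, ee_pos, dt, gain=0.5, deadzone=0.02):
    """Rate control: hand offset from a neutral pose commands a velocity."""
    offset = hand_pos - neutral_pos
    if np.linalg.norm(offset) < deadzone:   # ignore small, unintended offsets
        return ee_pos
    return ee_pos + gain * offset * dt      # integrate velocity into a new target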

Mielke, T; Heinrich, F; Hansen, C
Enhancing AR-to-Robot Registration Accuracy: A Comparative Study of Marker Detection Algorithms and Registration Parameters Proceedings Article
In: 2025 IEEE International Conference on Robotics and Automation (ICRA), IEEE, 2025.
@inproceedings{mielke_enhancing_2025,
title = {Enhancing AR-to-Robot Registration Accuracy: A Comparative Study of Marker Detection Algorithms and Registration Parameters},
author = {T Mielke and F Heinrich and C Hansen},
doi = {10.1109/ICRA55743.2025.11128039},
year = {2025},
date = {2025-09-02},
urldate = {2025-09-02},
booktitle = {2025 IEEE International Conference on Robotics and Automation (ICRA)},
publisher = {IEEE},
abstract = {Augmented Reality (AR) offers potential for enhancing human-robot collaboration by enabling intuitive interaction and real-time feedback. A crucial aspect of AR-robot integration is accurate spatial registration to align virtual content with the physical robotic workspace. This paper systematically investigates the effects of different tracking techniques and registration parameters on AR-to-robot registration accuracy, focusing on paired-point methods. We evaluate four marker detection algorithms - ARToolkit, Vuforia, ArUco, and retroreflective tracking - analyzing the influence of viewing distance, angle, marker size, point distance, distribution, and quantity. Our results show that ARToolkit provides the highest registration accuracy. While larger markers and positioning registration point centroids close to target locations consistently improved accuracy, other factors such as point distance and quantity were highly dependent on the tracking techniques used. Additionally, we propose an effective refinement method using point cloud registration, significantly improving accuracy by integrating data from points recorded between registration locations. These findings offer practical guidelines for enhancing AR-robot registration, with future work needed to assess the transferability to other AR devices and robots.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
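
The paired-point methods evaluated here reduce, at their core, to the standard least-squares rigid alignment of corresponding point sets (the Kabsch/Umeyama solution). A minimal NumPy sketch of that building block, not the paper's code:

import numpy as np

def paired_point_registration(P, Q):
    """Rigid transform (R, t) that best maps points P (AR/headset frame)
    onto corresponding points Q (robot frame) in the least-squares sense."""
    P_c, Q_c = P.mean(axis=0), Q.mean(axis=0)        # centroids of both point sets
    H = (P - P_c).T @ (Q - Q_c)                      # 3x3 cross-covariance matrix
    U, _, Vt = np.linalg.svd(H)
    d = np.sign(np.linalg.det(Vt.T @ U.T))           # guard against a reflection
    R = Vt.T @ np.diag([1.0, 1.0, d]) @ U.T
    t = Q_c - R @ P_c
    return R, t

The paper's observation that accuracy improves when the registration points' centroid lies close to the target location is consistent with the known behavior of this solution, whose target error grows with distance from the fiducial centroid.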

Mielke, T; Allgaier, M; Schott, D; Hansen, C; Heinrich, F
Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction Proceedings Article
In: Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pp. 1–8, ACM, Yokohama, Japan, 2025, ISBN: 979-8-4007-1395-8.
@inproceedings{mielke_virtual_2025,
title = {Virtual Studies, Real Results? Assessing the Impact of Virtualization on Human-Robot Interaction},
author = {T Mielke and M Allgaier and D Schott and C Hansen and F Heinrich},
url = {https://dl.acm.org/doi/10.1145/3706599.3719724},
doi = {10.1145/3706599.3719724},
isbn = {979-8-4007-1395-8},
year = {2025},
date = {2025-04-01},
urldate = {2025-04-01},
booktitle = {Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
publisher = {ACM},
address = {Yokohama, Japan},
abstract = {Extended Reality (XR) shows potential for human-centered evaluation of real-world scenarios and could improve efficiency and safety in robotic research. However, the validity of XR Human-Robot Interaction (HRI) studies remains underexplored. This paper investigates the transferability of HRI studies across virtualization levels for three tasks. Our results indicate XR study validity is task-specific, with task virtualization as a key influencing factor. Partially virtualized settings with virtual tasks and a real robot, as well as fully virtualized setups with a simulated robot, yielded results comparable to real setups for pick-and-place and robotic ultrasound. However, for precision-dependent peg-in-hole, differences were observed between real and virtualized conditions regarding completion time, perceived workload, and ease. Demonstrating the task dependency of XR transferability and comparing virtualization levels, our work takes an important step in assessing XR study validity. Future work should isolate factors affecting transferability and assess relative validity in the absence of absolute validity.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Mielke, T; Heinrich, F; Hansen, C
SensARy Substitution: Augmented Reality Techniques to Enhance Force Perception in Touchless Robot Control Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 31, no. 5, pp. 3235–3244, 2025, ISSN: 1941-0506.
@article{mielke_sensary_2025,
title = {SensARy Substitution: Augmented Reality Techniques to Enhance Force Perception in Touchless Robot Control},
author = {T Mielke and F Heinrich and C Hansen},
url = {https://ieeexplore.ieee.org/document/10926846},
doi = {10.1109/TVCG.2025.3549856},
issn = {1941-0506},
year = {2025},
date = {2025-03-14},
urldate = {2025-05-01},
journal = {IEEE Transactions on Visualization and Computer Graphics},
volume = {31},
number = {5},
pages = {3235–3244},
abstract = {The lack of haptic feedback in touchless human-robot interaction is critical in applications such as robotic ultrasound, where force perception is crucial to ensure image quality. Augmented reality (AR) is a promising tool to address this limitation by providing sensory substitution through visual or vibrotactile feedback. The implementation of visual force feedback requires consideration not only of feedback design but also of positioning. Therefore, we implemented two different visualization types at three different positions and investigated the effects of vibrotactile feedback on these approaches. Furthermore, we examined the effects of multimodal feedback compared to visual or vibrotactile output alone. Our results indicate that sensory substitution eases the interaction in contrast to a feedback-less baseline condition, with the presence of visual support reducing average force errors and being subjectively preferred by the participants. However, the more feedback was provided, the longer users needed to complete their tasks. Regarding visualization design, a 2D bar visualization reduced force errors compared to a 3D arrow concept. Additionally, the visualizations being displayed directly on the ultrasound screen were subjectively preferred. With findings regarding feedback modality and visualization design our work represents an important step toward sensory substitution for touchless human-robot interaction.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
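
As an illustration of the sensory-substitution idea studied here, a 2D bar visualization amounts to normalizing the measured contact force against a target range and rendering the result; a hypothetical sketch (parameter values invented, not the study's calibration):

def force_bar_level(force, target=5.0, tolerance=1.5, max_force=15.0):
    """Map a measured contact force (N) to a bar fill level in [0, 1] and
    flag whether the force lies inside the acceptable band."""
    level = max(0.0, min(force / max_force, 1.0))   # clamp to the bar's range
    in_target = abs(force - target) <= tolerance    # e.g. color the bar accordingly
    return level, in_target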

Schreiter, J; Mielke, T; Georgiades, M; Pech, M; Hansen, C; Heinrich, F
Exploring Interaction Concepts for the Manipulation of a Collaborative Robot: A Comparative Study Proceedings Article
In: Proceedings of the 2025 ACM/IEEE International Conference on Human-Robot Interaction, pp. 55–64, IEEE Press, Melbourne, Australia, 2025.
@inproceedings{schreiter_exploring_2025,
title = {Exploring Interaction Concepts for the Manipulation of a Collaborative Robot: A Comparative Study},
author = {J Schreiter and T Mielke and M Georgiades and M Pech and C Hansen and F Heinrich},
year = {2025},
date = {2025-01-01},
urldate = {2025-01-01},
booktitle = {Proceedings of the 2025 ACM/IEEE International Conference on Human-Robot Interaction},
pages = {55–64},
publisher = {IEEE Press},
address = {Melbourne, Australia},
series = {HRI '25},
abstract = {Robotic systems have the potential to enhance a wide range of domains, such as medical workflows, by automating individual steps of complex processes. However, human-robot interaction (HRI) is of critical importance, as effective collaboration between humans and robots is essential even in highly automated environments. Recent research has predominantly focused on the development of interaction methods rather than systematically comparing existing approaches. Therefore, we conducted a user study (n=20) to compare different HRI concepts for end effector manipulation combined with clutching mechanisms for manipulation activation in an alignment task using the example of robotic ultrasound (US). Manipulation methods included hand-guiding, teleoperation, and touchless interaction, while clutching mechanisms were realized through hand, voice, and foot interaction. The results indicate advantages of hand-guiding for manipulation. While no significant differences were observed between clutching mechanisms, strong evidence suggests comparable performance across these modalities. Notably, significant interaction effects on perceived workload reveal that the optimal clutching mechanism depends on the selected manipulation technique. This work underscores the critical importance of selecting appropriate HRI concepts and understanding the dependencies of manipulation techniques with clutching mechanisms. While our study included the usage of a robotic US, the insights gained are broadly transferable across various domains involving robotic manipulation tasks in human-robot collaboration.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
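
A clutching mechanism, in the sense compared here, simply gates whether manipulation input is forwarded to the robot: while the clutch (a hand, voice, or foot signal) is engaged, input moves the end effector; otherwise the robot holds its pose. A schematic sketch under that assumption, treating poses and inputs as 3-vectors (e.g. NumPy arrays); names are invented:

class ClutchedManipulation:
    def __init__(self):
        self.engaged = False
        self.anchor = None

    def set_clutch(self, engaged):
        """Called by the clutch modality (hand, voice, or foot)."""
        self.engaged = engaged
        self.anchor = None                  # re-anchor on every engagement

    def update(self, ee_pose, user_input):
        """Apply relative manipulation input only while the clutch is engaged."""
        if not self.engaged:
            return ee_pose                  # clutch open: robot holds its pose
        if self.anchor is None:
            self.anchor = user_input
        delta = user_input - self.anchor    # displacement since last update
        self.anchor = user_input
        return ee_pose + delta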

Mielke, T; Joeres, F; Schott, D; Hansen, C
Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation Proceedings Article
In: 2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 501–506, IEEE, Sydney, Australia, 2023, ISBN: 979-8-3503-2891-2.
@inproceedings{mielke_interactive_2023,
title = {Interactive Registration Methods for Augmented Reality in Robotics: A Comparative Evaluation},
author = {T Mielke and F Joeres and D Schott and C Hansen},
url = {https://ieeexplore.ieee.org/document/10322246/},
doi = {10.1109/ISMAR-Adjunct60411.2023.00109},
isbn = {979-8-3503-2891-2},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {2023 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {501–506},
publisher = {IEEE},
address = {Sydney, Australia},
abstract = {Augmented Reality (AR) visualization has shown potential for supporting intuitive and efficient human-robot interaction in a range of tasks. Since all these tasks are spatially related to the robot, the precise positioning of the AR content is critical to the applicability. However, most research has primarily focused on developing visualizations rather than exploring methods for aligning AR content in the robotic workspace. This paper aims to bridge this gap by implementing and comparing different interactive registration methods, including two point-based and one manual approach. We comparatively evaluated these registration methods in a user study (n=21), measuring registration accuracy, duration, and subjective user feedback. Our results indicate that the point-based methods outperform the manual approach in terms of both accuracy and perceived workload. Furthermore, participants achieved significantly faster performance with a point-based approach using physically defined registration points compared to a point-based approach using markers attached to the robot.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schreiter, J; Mielke, T; Schott, D; Thormann, M; Omari, J; Pech, M; Hansen, C
A multimodal user interface for touchless control of robotic ultrasound Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 18, no. 8, pp. 1429–1436, 2023, ISSN: 1861-6429.
@article{schreiter_multimodal_2022,
title = {A multimodal user interface for touchless control of robotic ultrasound},
author = {J Schreiter and T Mielke and D Schott and M Thormann and J Omari and M Pech and C Hansen},
url = {https://link.springer.com/10.1007/s11548-022-02810-0},
doi = {10.1007/s11548-022-02810-0},
issn = {1861-6429},
year = {2022},
date = {2022-12-01},
urldate = {2022-12-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {18},
number = {8},
pages = {1429–1436},
abstract = {Purpose Past research contained the investigation and development of robotic ultrasound. In this context, interfaces which allow for interaction with the robotic system are of paramount importance. Few researchers have addressed the issue of developing non-tactile interaction approaches, although they could be beneficial for maintaining sterility during medical procedures. Interaction could be supported by multimodality, which has the potential to enable intuitive and natural interaction. To assess the feasibility of multimodal interaction for non-tactile control of a co-located robotic ultrasound system, a novel human–robot interaction concept was developed.
Methods The medical use case of needle-based interventions under hybrid computed tomography and ultrasound imaging was analyzed by interviewing four radiologists. From the resulting workflow, interaction tasks were derived which include human–robot interaction. Based on this, characteristics of a multimodal, touchless human–robot interface were elaborated, suitable interaction modalities were identified, and a corresponding interface was developed, which was thereafter evaluated in a user study with eight participants.
Results The implemented interface includes voice commands, combined with hand gesture control for discrete control and navigation interaction of the robotic US probe, respectively. The interaction concept was evaluated by the users in the form of a quantitative questionnaire with above-average usability. Qualitative analysis of interview results revealed user satisfaction with the implemented interaction methods and potential improvements to the system.
Conclusion A multimodal, touchless interaction concept for a robotic US for the use case of needle-based procedures in interventional radiology was developed, incorporating combined voice and hand gesture control. Future steps will include the integration of a solution for the missing haptic feedback and the evaluation of its clinical suitability.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
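
The interface's division of labor (voice for discrete commands, hand gestures for continuous probe navigation) can be sketched as a simple event dispatcher; structure and names are hypothetical, not the study's code:

DISCRETE_COMMANDS = {"start", "stop", "select mode"}

def dispatch(event, robot):
    """Route voice events to discrete actions and hand-tracking
    events to continuous probe navigation."""
    kind, payload = event                  # ("voice", "stop") or ("hand", delta_pose)
    if kind == "voice" and payload in DISCRETE_COMMANDS:
        robot.execute(payload)             # discrete: activation, mode selection
    elif kind == "hand" and robot.navigation_enabled:
        robot.move_probe(payload)          # continuous: incremental probe motion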

Mielke, T; Joeres, F; Hansen, C
Natural 3D Object Manipulation for Interactive Laparoscopic Augmented Reality Registration Proceedings Article
In: Chen, Jessie Y. C.; Fragomeni, Gino (Eds.): Virtual, Augmented and Mixed Reality: Design and Development, pp. 317–328, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-05939-1.
@inproceedings{mielke_natural_2022,
title = {Natural 3D Object Manipulation for Interactive Laparoscopic Augmented Reality Registration},
author = {T Mielke and F Joeres and C Hansen},
editor = {Jessie Y. C. Chen and Gino Fragomeni},
doi = {10.1007/978-3-031-05939-1_21},
isbn = {978-3-031-05939-1},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Virtual, Augmented and Mixed Reality: Design and Development},
pages = {317–328},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {Due to the growing focus on minimally invasive surgery, there is increasing interest in intraoperative software support. For example, augmented reality can be used to provide additional information. Accurate registration is required for effective support. In this work, we present a manual registration method that aims at mimicking natural manipulation of 3D objects using tracked surgical instruments. This method is compared to a point-based registration method in a simulated laparoscopic environment. Both registration methods serve as an initial alignment step prior to surface-based registration refinement. For the evaluation, we conducted a user study with 12 participants. The registration methods were compared in terms of registration accuracy, registration duration, and subjective usability feedback. No significant differences could be found with respect to the previously mentioned criteria between the manual and the point-based registration methods. Thus, the manual registration did not outperform the reference method. However, we found that our method offers qualitative advantages, which may make it more suitable for some application scenarios. Furthermore we identified possible approaches for improvement, which should be investigated in the future to strengthen possible advantages of our registration method.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Mielke, T
Entwicklung einer multimodalen Benutzerschnittstelle zur berührungslosen Steuerung eines Roboterarms im medizinischen Interventionsraum Masters Thesis
2022.
@mastersthesis{mielke_entwicklung_2022,
title = {Entwicklung einer multimodalen Benutzerschnittstelle zur berührungslosen Steuerung eines Roboterarms im medizinischen Interventionsraum},
author = {T Mielke},
url = {https://www.var.ovgu.de/pub/2022_MA_Tonia_Mielke.pdf},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
abstract = {In interventional radiology, minimally invasive procedures can be performed using medical imaging. To safely navigate the instruments to target structures during needle-guided procedures, the combination of computed tomography and ultrasound can be used for visualization. Ultrasound offers the advantage of imaging without ionizing radiation. However, radiologists must simultaneously position the probe and needle during the procedure, which can lead to unergonomic postures and inaccuracies. To overcome these challenges, a robotic arm can be used to position the ultrasound probe. While some tasks such as visualization of the needle tip could be performed autonomously in the future, there are still tasks that need to be performed manually. Therefore, an interface for direct communication with the robot is required, which facilitates, for example, manual visualization of the target lesion or correction of automatic robot control.

In this work, a multimodal interface for human-robot interaction with a robotic arm for ultrasound imaging is developed. For this purpose, different interaction concepts with different interaction modalities were developed and evaluated. In a user study, we were able to show that for discrete interactions such as mode selection, interaction via speech is more suitable than gaze interaction. For navigation interactions, control via hand movements resulted in a lower task completion time than via head movements. Based on these results, a final interaction concept consisting of speech and hand interaction was developed. This concept was finally evaluated in a qualitative user study. As a result, it was determined that the system enables user-friendly manual positioning of the robot for ultrasound imaging. Furthermore, additional functions could be identified that could contribute to the overall improvement of the system.},
keywords = {},
pubstate = {published},
tppubtype = {mastersthesis}
}

Joeres, F; Mielke, T; Hansen, C
Laparoscopic augmented reality registration for oncological resection site repair Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 16, no. 9, pp. 1577–1586, 2021, ISSN: 1861-6429.
@article{joeres_laparoscopic_2021,
title = {Laparoscopic augmented reality registration for oncological resection site repair},
author = {F Joeres and T Mielke and C Hansen},
url = {https://doi.org/10.1007/s11548-021-02336-x},
doi = {10.1007/s11548-021-02336-x},
issn = {1861-6429},
year = {2021},
date = {2021-09-01},
urldate = {2021-09-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {16},
number = {9},
pages = {1577–1586},
abstract = {Resection site repair during laparoscopic oncological surgery (e.g. laparoscopic partial nephrectomy) poses some unique challenges and opportunities for augmented reality (AR) navigation support. This work introduces an AR registration workflow that addresses the time pressure that is present during resection site repair.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}

Mielke, T
Entwicklung eines Registrierungskonzepts für laparoskopische Augmented Reality Bachelor Thesis
2020.
@bachelorthesis{mielke_entwicklung_2020,
title = {Entwicklung eines Registrierungskonzepts für laparoskopische Augmented Reality},
author = {T Mielke},
url = {https://www.var.ovgu.de/pub/BA_Mielke_geschwaerzt.pdf},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
abstract = {Renal cell carcinoma is the most common malignant tumor located in the kidney. The only known curative treatment for this type of tumor is its surgical removal. For early-stage tumors, a partial nephrectomy can be performed to preserve as much of the normally functioning kidney as possible, sparing as much healthy tissue as possible while completely resecting the tumor. This gives rise to challenges, among others in repairing damage to the kidney after tumor resection, where intraoperative software support such as augmented reality can be helpful.

In this thesis, a program for the correct positioning and orientation of virtual content in the real environment is developed, implemented, and evaluated. It is a two-step registration concept: the first step comprises an initial point-based alignment followed by a surface-based registration; the second step consists of a point-based registration based on previously positioned markers.

The registration concept was evaluated with respect to accuracy in terms of the target registration error (TRE) and duration in terms of task completion time (TCT). For this purpose, the initial registration based on anatomical landmarks (TRE: 11.83 ± 1.26 mm, TCT: 42.72 ± 2.78 s) was compared with the secondary registration (TRE: 11.36 ± 1.47 mm, TCT: 38.52 ± 2.08 s). A significant reduction in registration duration was found, with no significant change in accuracy. In addition, the evaluation of a subgroup showed that where the surface-based registration improved upon the accuracy of the initial registration, the secondary registration (TRE: 9.83 ± 1.42 mm) also achieved a significant accuracy improvement over the initial registration (TRE: 14.43 ± 1.30 mm). Furthermore, a difference in TRE of 2.35 ± 0.96 mm between the surface-based (TRE: 9.01 ± 1.01 mm) and the secondary registration was observed.},
keywords = {},
pubstate = {published},
tppubtype = {bachelorthesis}
}
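
The target registration error reported throughout this thesis is, in the usual definition, the distance between a registered point and its true counterpart at locations not used for the registration itself; a one-function sketch (names illustrative):

import numpy as np

def target_registration_error(R, t, targets_src, targets_ref):
    """Per-target TRE: Euclidean distance between transformed source
    targets and their reference positions (in the reference frame's units)."""
    mapped = targets_src @ R.T + t          # apply q = R p + t row-wise
    return np.linalg.norm(mapped - targets_ref, axis=1)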