Wilhelm Herbrich, M.Sc.
Short Bio
2015–2023 Full-stack developer in the fields of AR, POS systems and libraries
2020–2023 Master's degree in Visual Computing
2013–2017 Bachelor's degree in Media & Computer Science
Research Interests
Currently researching dyadic hand interaction in web-based virtual reality
Publications
2025

Herbrich, W; Zittlau, P; Joeres, F; Hansen, C
Prototype development of a cross-reality digital twin ecosystem: the web, open source and open data Proceedings Article
In: 2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 459–462, 2025.
@inproceedings{herbrich_prototype_2025,
title = {Prototype development of a cross-reality digital twin ecosystem: the web, open source and open data},
author = {W Herbrich and P Zittlau and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/10972895},
doi = {10.1109/VRW66409.2025.00100},
year = {2025},
date = {2025-03-01},
urldate = {2025-03-01},
booktitle = {2025 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {459–462},
abstract = {This work contributes to a broader initiative aimed at transforming a former industrial port area into a dynamic Knowledge Transfer Space (KTS). To support this transformation, we explore the development of a cross-reality (CR) digital twin of the port area, which integrates user interfaces with varying degrees of virtuality. We evaluate different web technologies, focusing on the balance between accessibility, immersion, scalability, and performance. By comparing client-side rendering with pixel streaming approaches, we aim to identify suitable solutions for prototyping a CR digital twin ecosystem. The development of a prototype is ongoing, based on a client-side rendering approach. The outcomes contribute to developing an open and transferable CR digital twin applicable to similar urban projects in other cities.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
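The abstract above describes settling on a client-side rendering approach for the browser-based digital twin prototype. As a rough illustration of that approach (a sketch only, not code from the project; the model path and scene parameters are placeholders), the following loads a glTF scene with three.js and renders it entirely in the browser:

```typescript
// Illustrative client-side rendering setup with three.js (not project code).
// The glTF path below is a placeholder.
import * as THREE from 'three';
import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';

const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);

const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(
  60, window.innerWidth / window.innerHeight, 0.1, 2000,
);
camera.position.set(0, 50, 120);
scene.add(new THREE.AmbientLight(0xffffff, 1.0));

// The whole model is downloaded and rendered on the client, in contrast to a
// pixel-streaming setup where a server renders frames and streams video.
new GLTFLoader().load('/models/port-area.glb', (gltf) => {
  scene.add(gltf.scene);
});

renderer.setAnimationLoop(() => renderer.render(scene, camera));
```

The trade-off sketched in the abstract shows up directly here: the client pays the download and rendering cost, but no rendering server is needed.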
2024

Joeres, F; Zittlau, P; Herbrich, W; Heinrich, F; Rose, G; Hansen, C
Concept development of a cross-reality ecosystem for urban knowledge transfer spaces Proceedings Article
In: 2024 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct), pp. 166–169, 2024, (ISSN: 2771-1110).
@inproceedings{joeres_concept_2024,
title = {Concept development of a cross-reality ecosystem for urban knowledge transfer spaces},
author = {F Joeres and P Zittlau and W Herbrich and F Heinrich and G Rose and C Hansen},
url = {https://ieeexplore.ieee.org/abstract/document/10765174},
doi = {10.1109/ISMAR-Adjunct64951.2024.00043},
year = {2024},
date = {2024-10-01},
urldate = {2024-10-01},
booktitle = {2024 IEEE International Symposium on Mixed and Augmented Reality Adjunct (ISMAR-Adjunct)},
pages = {166–169},
abstract = {This paper presents the development of a cross-reality (CR) ecosystem designed for an urban knowledge transfer space (KTS) in a post-industrial urban environment. The project is part of a larger initiative aimed at transforming a former industrial river port into a dynamic KTS, facilitating interactions between scientific, commercial, residential, and cultural stakeholders. Our research explores the potential of multimodal mixed reality (XR) technologies to enhance engagement with the content and stakeholders of the KTS. Through a three-phase process, we identified key stakeholders and their target audiences, selected appropriate XR technologies, and developed initial use cases that integrate web applications, mobile augmented reality (AR), and XR head-mounted displays. The preliminary findings indicate that these technologies can effectively cater to diverse user groups, providing different levels of virtuality and interaction. However, challenges remain, particularly in stakeholder engagement and the evolving nature of the KTS initiative. Ongoing work includes the development of a Web-XR-based prototype, which will be iteratively refined to better meet user needs and adapt to future technological advancements. This research contributes to the understanding of how CR technologies can be employed in urban transformation processes, offering insights into the design of flexible and scalable CR ecosystems.},
note = {ISSN: 2771-1110},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Schwenderling, L; Herbrich, W; Joeres, F; Hansen, C
A Novel Framework for Hand Visualization in Web-Based Collaborative XR Proceedings Article
In: 2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 18–23, IEEE, Orlando, FL, USA, 2024, ISBN: 979-8-3503-7449-0.
@inproceedings{schwenderling_novel_2024,
title = {A Novel Framework for Hand Visualization in Web-Based Collaborative XR},
author = {L Schwenderling and W Herbrich and F Joeres and C Hansen},
url = {https://ieeexplore.ieee.org/document/10536317/},
doi = {10.1109/VRW62533.2024.00010},
isbn = {979-8-3503-7449-0},
year = {2024},
date = {2024-03-01},
urldate = {2024-03-01},
booktitle = {2024 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
pages = {18–23},
publisher = {IEEE},
address = {Orlando, FL, USA},
abstract = {Many extended reality (XR) applications are platform-specific, making accessibility and cross-platform collaboration difficult. Web-based collaborative XR can enhance adoption of XR technologies, using the browser as a platform-independent interface. However, challenges arise from the browser environment, such as performance limitations. To this end, we present a WebXR-based framework for hand interaction in cross-platform collaboration in XR. A network structure and methods for collaborative and individual object manipulation complement the integrated hand tracking. Three different fidelity levels to represent the hands of remote users were implemented to accommodate different performance capabilities. Concepts ranged from virtual hands to discrete poses with abstract objects. A sample application was implemented with a puzzle task. Two users collaborated in the browsers of the Microsoft HoloLens 2 and the Meta Quest 2. Qualitative and quantitative data on user performance (n=9), and frame rate recordings (n=1) were collected. All users were able to solve the puzzle together quickly and intuitively. The Quest environment was preferred, as there were more performance issues with the HoloLens. Hand interaction was well-received and proved to be sufficient as the only form of communication. Simpler representations of the hands lead to a higher frame rate, whereby the effects were device-dependent. The impact on task performance was low. Hand interaction enables an intuitive exchange of objects and basic communication in cross-platform collaboration via browsers. Depending on the XR environment, however, device-specific performance limitations must be taken into account by modulating the amount of data and rendering effort.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
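The framework described above builds on browser-side hand tracking. As a minimal sketch of the underlying WebXR Hand Input API (illustrative only, not the framework's code; rendering setup is omitted and types are kept loose because the hand-input interfaces are not yet part of the default TypeScript DOM typings), the following requests an immersive session with hand tracking and reads one joint pose per hand each frame:

```typescript
// Illustrative use of the WebXR Hand Input API (not the framework's own code).
// Rendering setup (XRWebGLLayer etc.) is omitted for brevity; `any` is used
// because hand-input interfaces are missing from the default DOM lib.
async function startHandTracking(): Promise<void> {
  const session = await (navigator as any).xr.requestSession('immersive-vr', {
    optionalFeatures: ['hand-tracking'],
  });
  const refSpace = await session.requestReferenceSpace('local');

  const onFrame = (_time: number, frame: any) => {
    for (const source of frame.session.inputSources) {
      if (!source.hand) continue; // e.g. a controller without hand tracking
      const tip = source.hand.get('index-finger-tip');
      const pose = frame.getJointPose(tip, refSpace);
      if (pose) {
        // pose.transform.position / .orientation could be sent over the network
        // to drive a remote user's hand representation at the chosen fidelity.
      }
    }
    frame.session.requestAnimationFrame(onFrame);
  };
  session.requestAnimationFrame(onFrame);
}
```

How much of this per-joint data is shared per frame is exactly the knob the paper's fidelity levels turn to trade visual detail against frame rate.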
2023

Schwenderling, L; Kleinau, A; Herbrich, W; Kasireddy, H; Heinrich, F; Hansen, C
Activation modes for gesture-based interaction with a magic lens in AR anatomy visualisation Journal Article
In: Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization, vol. 11, no. 4, pp. 1243–1250, 2023, ISSN: 2168-1163, (Publisher: Taylor & Francis).
@article{schwenderling_activation_2023,
title = {Activation modes for gesture-based interaction with a magic lens in AR anatomy visualisation},
author = {L Schwenderling and A Kleinau and W Herbrich and H Kasireddy and F Heinrich and C Hansen},
url = {https://doi.org/10.1080/21681163.2022.2157749},
doi = {10.1080/21681163.2022.2157749},
issn = {2168-1163},
year = {2023},
date = {2023-07-01},
urldate = {2023-07-01},
journal = {Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization},
volume = {11},
number = {4},
pages = {1243–1250},
abstract = {Learning human anatomy is key for health-related education and often requires expensive and time-consuming cadaver dissection courses. Augmented reality (AR) for the representation of spatially registered 3D models can be used as a low-cost and flexible alternative. However, suitable visualisation and interaction approaches are needed to display multilayered anatomy data. This paper features a spherical volumetric AR Magic Lens controlled by mid-air hand gestures to explore the human anatomy on a phantom. Defining how gestures control associated actions is important for intuitive interaction. Therefore, two gesture activation modes were investigated in a user study (n = 24). Performing the gestures once to toggle actions showed a higher interaction count since an additional stop gesture was used. Holding the gestures was favoured in the qualitative feedback. Both modes showed similar performance in terms of accuracy and task completion time. Overall, direct gesture manipulation of a magic lens for anatomy visualisation is, thus, recommended.},
note = {Publisher: Taylor & Francis},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
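The study above compares two ways of mapping a recognised gesture to the magic-lens action: performing the gesture once to toggle it on (with a separate stop gesture to end it) versus holding the gesture for as long as the action should stay active. A schematic sketch of that distinction, with hypothetical gesture callbacks rather than the study's actual implementation, could look like this:

```typescript
// Schematic sketch of the two activation modes compared in the study
// (hypothetical callbacks, not the study's implementation).
type ActivationMode = 'toggle' | 'hold';

class LensActivation {
  private active = false;

  constructor(private readonly mode: ActivationMode) {}

  // The activation gesture was detected.
  gestureDetected(): void {
    this.active = true;
  }

  // The hand left the gesture pose.
  gestureReleased(): void {
    if (this.mode === 'hold') this.active = false; // hold: action ends with the gesture
  }

  // A dedicated stop gesture was detected (only meaningful in toggle mode).
  stopGestureDetected(): void {
    if (this.mode === 'toggle') this.active = false;
  }

  get isActive(): boolean {
    return this.active;
  }
}
```

The extra stop gesture in toggle mode is what the abstract refers to when it reports a higher interaction count for that mode.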