<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v22i8e18637</article-id>
      <article-id pub-id-type="pmid">32788146</article-id>
      <article-id pub-id-type="doi">10.2196/18637</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Augmented Reality System for Digital Rectal Examination Training and Assessment: System Validation</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Eysenbach</surname>
            <given-names>Gunther</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Friedrich</surname>
            <given-names>Christoph</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Aksoy</surname>
            <given-names>Mehmet</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author">
          <name name-style="western">
            <surname>Muangpoon</surname>
            <given-names>Theerapat</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-8110-5861</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Haghighi Osgouei</surname>
            <given-names>Reza</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>Faculty of Medicine, Department of Surgery and Cancer</institution>
            <institution>Imperial College London</institution>
            <addr-line>Rm H38, ICCESS, 3rd Floor, Chelsea and Westminster Hospital</addr-line>
            <addr-line>369 Fulham Road</addr-line>
            <addr-line>London, SW10 9NH</addr-line>
            <country>United Kingdom</country>
            <phone>44 7940419331</phone>
            <email>r.haghighi-osgouei@imperial.ac.uk</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-5015-9576</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Escobar-Castillejos</surname>
            <given-names>David</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-6583-5274</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Kontovounisios</surname>
            <given-names>Christos</given-names>
          </name>
          <degrees>MD, PhD, FACS, FRCSE</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-1828-1395</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author">
          <name name-style="western">
            <surname>Bello</surname>
            <given-names>Fernando</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-4136-0355</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Faculty of Medicine, Department of Surgery and Cancer</institution>
        <institution>Imperial College London</institution>
        <addr-line>London</addr-line>
        <country>United Kingdom</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Reza Haghighi Osgouei <email>r.haghighi-osgouei@imperial.ac.uk</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <month>8</month>
        <year>2020</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>13</day>
        <month>8</month>
        <year>2020</year>
      </pub-date>
      <volume>22</volume>
      <issue>8</issue>
      <elocation-id>e18637</elocation-id>
      <history>
        <date date-type="received">
          <day>9</day>
          <month>3</month>
          <year>2020</year>
        </date>
        <date date-type="rev-request">
          <day>18</day>
          <month>4</month>
          <year>2020</year>
        </date>
        <date date-type="rev-recd">
          <day>9</day>
          <month>5</month>
          <year>2020</year>
        </date>
        <date date-type="accepted">
          <day>13</day>
          <month>5</month>
          <year>2020</year>
        </date>
      </history>
      <copyright-statement>©Theerapat Muangpoon, Reza Haghighi Osgouei, David Escobar-Castillejos, Christos Kontovounisios, Fernando Bello. Originally published in the Journal of Medical Internet Research (http://www.jmir.org), 13.08.2020.</copyright-statement>
      <copyright-year>2020</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research, is properly cited. The complete bibliographic information, a link to the original publication on http://www.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://www.jmir.org/2020/8/e18637" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Digital rectal examination is a difficult examination to learn and teach because of limited opportunities for practice; however, the main challenge is that students and tutors cannot see the finger when it is palpating the anal canal and prostate gland inside the patient.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>This paper presents an augmented reality system to be used with benchtop models commonly available in medical schools with the aim of addressing the problem of lack of visualization. The system enables visualization of the examining finger, as well as of the internal organs when performing digital rectal examinations. Magnetic tracking sensors are used to track the movement of the finger, and a pressure sensor is used to monitor the applied pressure. By overlaying a virtual finger on the real finger and a virtual model on the benchtop model, students can see through the examination and finger maneuvers.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>The system was implemented in the Unity game engine (Unity Technologies) and uses a first-generation HoloLens (Microsoft Inc) as an augmented reality device. To evaluate the system, 19 participants (9 clinicians who routinely performed digital rectal examinations and 10 medical students) were asked to use the system and answer 12 questions regarding the usefulness of the system.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>The system showed the movement of an examining finger in real time with a frame rate of 60 fps on the HoloLens and accurately aligned the virtual and real models with a mean error of 3.9 mm. Users found the movement of the finger was realistic (mean 3.9, SD 1.2); moreover, they found the visualization of the finger and internal organs were useful for teaching, learning, and assessment of digital rectal examinations (finger: mean 4.1, SD 1.1; organs: mean 4.6, SD 0.8), mainly targeting a novice group.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>The proposed augmented reality system was designed to improve teaching and learning of digital rectal examination skills by providing visualization of the finger and internal organs. The initial user study proved its applicability and usefulness.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>Augmented Reality</kwd>
        <kwd>Digital Rectal Examination (DRE)</kwd>
        <kwd>Magnetic Tracker</kwd>
        <kwd>Pressure Sensor</kwd>
        <kwd>Medical Education</kwd>
        <kwd>Usability</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Digital rectal examination (DRE) is a physical examination for detecting rectal and prostate abnormalities. Focusing on prostate examination, DRE requires the use of an index finger to palpate the prostate gland to detect abnormalities in gland size, tenderness, and surface texture. Even though it is a recommendation of the American Cancer Society to perform DRE to screen and detect prostate cancer in patients with colorectal symptoms, multiple studies [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref2">2</xref>] have found that, during their final year, medical students are not confident in their abilities to perform the examination. The lack of confidence in performing DRE among medical students has mainly been attributed to not having adequate practice in medical schools [<xref ref-type="bibr" rid="ref1">1</xref>].</p>
      <p>A standard method to train DRE skills is to practice on a benchtop model. This model is a plastic human mannequin with skin-like rubber to represent the rectal canal with several plastic replaceable prostate models (<xref rid="figure1" ref-type="fig">Figure 1</xref>). Even though students can touch and feel the prostate gland through the rubber rectum, no visualization of finger movement or internal organs can be obtained because it lacks transparency. Similarly, this model does not provide enough information to examiners to assess the techniques used by students to perform DRE.</p>
      <p>There have been several attempts to improve the visualization of DRE on a benchtop model. Early attempts include a training system using virtual reality technology, together with a Phantom haptic device [<xref ref-type="bibr" rid="ref3">3</xref>] which displayed a simplified 3D model of kidneys, rectum, bladder, and prostate along with the virtual representation of the examining finger on the 2D monitor. This system was evaluated against the rubber model, and it was concluded that it could be a new way to train DRE if the realism of the haptic system was improved [<xref ref-type="bibr" rid="ref3">3</xref>]. In another study [<xref ref-type="bibr" rid="ref4">4</xref>], a similar approach used a haptic interface for palpating the prostate gland. In follow-up work and to improve the design of haptic-based learning tools, Granados et al [<xref ref-type="bibr" rid="ref5">5</xref>] conducted a study to better understand palpation techniques of experts while conducting DRE on a real subject. Dobson et al [<xref ref-type="bibr" rid="ref6">6</xref>] proposed a system using virtual reality technology to visualize the anatomy in the pelvic area [<xref ref-type="bibr" rid="ref6">6</xref>]. For their system, the user had to wear special glasses to view the model in 3D which was displayed on the 67-inch×50-inch screen (VR ImmersaDesk). It was shown that the system helped medical students in gaining more understanding of the anatomy and resulted in better exam scores [<xref ref-type="bibr" rid="ref6">6</xref>]. Rissanen et al [<xref ref-type="bibr" rid="ref7">7</xref>] introduced Annotated Simulation Records for DRE, which focused on using virtual reality technology to reveal useful data from the sensor during DRE practice. In this system, the urologist selected the most useful parameters to be annotated to help in teaching DRE. This system was evaluated by medical students, and it was found that the numerical annotations helped them learn faster than verbal feedback. 
Balkissoon et al [<xref ref-type="bibr" rid="ref8">8</xref>] introduced a DRE training system that consisted of a physical benchtop model and a 2D screen for visualizing the DRE in which multiple sensors were attached to the prostate gland in the benchtop model to measure user applied pressure on various locations. Visual information, including applied force, palpated area, and palpation at each location, was displayed during the examination. The study [<xref ref-type="bibr" rid="ref8">8</xref>] showed that the sensors could help the instructor to observe and assess the performance of medical students performing DREs. In another model [<xref ref-type="bibr" rid="ref9">9</xref>], a similar concept, embedding pressure sensors in the model, was followed.</p>
      <p>Displaying information, such as finger position and pressure on a 2D screen, was demonstrated to be beneficial in understanding and performing DRE; however, the user experience was not ideal due to the lack of colocation between the benchtop model and the display.</p>
      <p>In this paper, we present an augmented reality system for DRE visualization. It uses sensors attached to the examining finger to track its maneuvers and to monitor applied pressure. It also displays the DRE and essential information overlaid on the real benchtop model using an augmented reality device. The main goal was to improve the user experience of a widely available benchtop model. The paper first describes the visualization of the examining finger and internal organs, the step-by-step guidance for DRE, and the performance recording feature. Results, including performance measures and feedback from clinicians and medical students, are then reported, followed by discussion and conclusions.</p>
      <fig id="figure1" position="float">
        <label>Figure 1</label>
        <caption>
          <p>A standard benchtop model used in medical schools for teaching and practicing digital rectal examination.</p>
        </caption>
        <graphic xlink:href="jmir_v22i8e18637_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Hardware and Software</title>
        <p>The proposed augmented reality system is used as an extension to a standard Rectal Examination Trainer (Mk2, Limbs &#38; Things Inc) benchtop model (<xref rid="figure1" ref-type="fig">Figure 1</xref>, [<xref ref-type="bibr" rid="ref10">10</xref>]). The model is a semirealistic representation of the buttocks, anus, and rectum, allowing for the practice of diagnostic skills associated with rectal examination. It includes an additional rectal examination perineum, which contains two rectal pathologies (polyp and carcinoma). For prostate examination, five interchangeable prostates are provided: normal, bilateral benign, unilateral benign, bilateral carcinoma, and unilateral carcinoma. The model can be used for both digital examination of the prostate and rectum, as well as for the insertion and use of an anoscope and proctoscope.</p>
        <p>We used the HoloLens (version 1, Microsoft Inc) as an augmented reality head-mounted display (<xref rid="figure2" ref-type="fig">Figure 2</xref>). The HoloLens is immersive and see-through to help the user perceive the environment as realistic. It also enables interactions with holographic content. The HoloLens features an inertial measurement unit (accelerometer, gyroscope, and magnetometer), 4 environment understanding sensors (2 on each side), an energy-efficient depth camera with a 120°×120° angle of view, a 2.4-megapixel photographic video camera, a 4-microphone array, and an ambient light sensor [<xref ref-type="bibr" rid="ref11">11</xref>].</p>
        <p>HoloLens has been used in various medical simulations; VimedixAR (CAE Inc) was the first ultrasound simulator to integrate a HoloLens [<xref ref-type="bibr" rid="ref12">12</xref>]; with this system, health care professionals manipulate representations of realistic anatomical parts and view the ultrasound beam in real time as it passes through human anatomy. With CAE LucinaAR, clinical learners can view 3D holograms of a fetus, as it descends the birth canal, and learn to manage complex deliveries [<xref ref-type="bibr" rid="ref13">13</xref>]. HoloPatient (Microsoft Inc) is a mixed-reality learning tool for nursing, allied health, and medical schools that delivers simulated patient experiences [<xref ref-type="bibr" rid="ref14">14</xref>]. Learning Heart (Spheregen) is a HoloLens application that assists students in understanding the physiology of the heart [<xref ref-type="bibr" rid="ref15">15</xref>].</p>
        <p>The 3D software in our system was developed using the Unity game engine (version 2018.3.7f1; Unity Technologies) [<xref ref-type="bibr" rid="ref16">16</xref>], a cross-platform authoring tool for creating 3D content.</p>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>Microsoft HoloLens, a head-mounted augmented reality device to display 3D virtual objects.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Real-Time Performance Visualization</title>
        <p>To track and show the movement of the examining finger inside the benchtop model during the examination, we used a Trakstar magnetic tracking system (Northern Digital Inc) to obtain the position and orientation (pose) of the examining finger in real time, due to its ability to operate without line-of-sight [<xref ref-type="bibr" rid="ref17">17</xref>]. It consists of a midrange magnetic field transmitter and a 6 degrees-of-freedom receiver (model 180), and it was connected to an electronics unit for amplification and digitization. The 6 degrees-of-freedom sensor has a position accuracy of 1.40 mm RMS and orientation accuracy of 0.50° RMS. The combination of the transmitter and the receiver allows tracking within a 30×40×30 cm<sup>3</sup> zone, which is large enough for tracking the examining finger inside the benchtop model. The sensor was attached to the finger using thin tape (<xref rid="figure3" ref-type="fig">Figure 3</xref>, [<xref ref-type="bibr" rid="ref17">17</xref>]). Sensor data were read at 40 Hz by the computer via an API using a previously developed C++ plug-in [<xref ref-type="bibr" rid="ref18">18</xref>]. Once read, sensor data were transferred to the HoloLens via Wi-Fi. Position and orientation were transformed into the real-world coordinate system with the help of Vuforia Engine (version 8.1.7) [<xref ref-type="bibr" rid="ref19">19</xref>]. The transformation was achieved by using the HoloLens to track the pose of the image target in world coordinates (<xref rid="figure4" ref-type="fig">Figure 4</xref>). This transformation enabled synchronization and overlay of the virtual finger on the tracked real finger, and for the result to be seen through the HoloLens (<xref rid="figure5" ref-type="fig">Figure 5</xref>).</p>
        <fig id="figure3" position="float">
          <label>Figure 3</label>
          <caption>
            <p>A Trakstar magnetic positioning sensor attached to the examining finger.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure4" position="float">
          <label>Figure 4</label>
          <caption>
            <p>Conversion process of the position tracking coordinates to real-world (HoloLens) coordinates using Vuforia image tracking.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig4.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure5" position="float">
          <label>Figure 5</label>
          <caption>
            <p>The virtual finger in blue overlaid onto the real examining finger with a blue medical glove.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig5.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>To visualize the internal components of the benchtop model and to overlay the relevant virtual anatomy, it was necessary to first align the virtual model (<xref rid="figure6" ref-type="fig">Figure 6</xref>) with the physical benchtop model. For this purpose, the iterative closest point algorithm was used [<xref ref-type="bibr" rid="ref20">20</xref>]; it takes the position of 7 anatomical landmarks as an input (<xref rid="figure7" ref-type="fig">Figure 7</xref>) and yields a transformation matrix. This matrix is used to rotate and translate the virtual benchtop model and virtual internal organs to align with the physical benchtop model (<xref rid="figure8" ref-type="fig">Figure 8</xref>). Once aligned, the user can visualize the movement of the examining finger and the model by directly looking at the model with the HoloLens (<xref rid="figure9" ref-type="fig">Figure 9</xref>).</p>
        <fig id="figure6" position="float">
          <label>Figure 6</label>
          <caption>
            <p>3D virtual benchtop model.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig6.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure7" position="float">
          <label>Figure 7</label>
          <caption>
            <p>Red landmarks on a benchtop model used as inputs to the iterative closest point algorithm.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig7.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure8" position="float">
          <label>Figure 8</label>
          <caption>
            <p>A virtual transparent benchtop model with a 3D virtual prostate inside is aligned with the physical benchtop model.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig8.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure9" position="float">
          <label>Figure 9</label>
          <caption>
            <p>Real-time visualization of digital rectal examination using the proposed augmented reality system.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig9.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>A FingerTPS force sensor [<xref ref-type="bibr" rid="ref21">21</xref>] was also attached to the examining finger to estimate the pressure applied to the prostate during palpation. The force sensor is flat and thin and can be worn under a surgical glove (<xref rid="figure10" ref-type="fig">Figure 10</xref>, [<xref ref-type="bibr" rid="ref21">21</xref>]). The force sensor data were transferred to the HoloLens using the same process as for the pose information from the tracking sensor. A force visualizer, represented as a color bar with 3 regions showing different levels of pressure applied to the prostate gland, was displayed to provide real-time feedback (<xref rid="figure11" ref-type="fig">Figure 11</xref>).</p>
        <fig id="figure10" position="float">
          <label>Figure 10</label>
          <caption>
            <p>A FingerTPS pressure sensor. It measures pressure applied during the prostate palpation.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig10.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure11" position="float">
          <label>Figure 11</label>
          <caption>
            <p>Real-time visualization of force applied by the examining finger.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig11.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Internal Organ Labeling</title>
        <p>Labels were used for displaying relevant information regarding internal anatomy inside the benchtop model; however, since there were several anatomical structures inside a small area, this made it difficult to label the anatomy in an effective way (in a way that avoids overlap with other labels or with the anatomy). Also, since our system allowed users to look at the anatomy from different perspectives, labels needed to be dynamically positioned. To address these issues, we implemented a view management system, capable of resolving occlusion among labels. Our labeling system used the labeling technique suggested by Tatzgern [<xref ref-type="bibr" rid="ref22">22</xref>] (overcoming occlusion by limiting update rates, ie, not continuously moving the labels to separate them, but only when they occlude each other) combined with the theory by Hartzmann [<xref ref-type="bibr" rid="ref23">23</xref>] to ensure that the labels were always near the object and that the lines from labels to objects did not cross each other. To achieve these criteria, the following steps are iteratively applied until all occlusions are resolved: (1) Place the label near the referenced object on the surface of the sphere, centered at the center of mass of the benchtop model. (2) Iterate through all labels to find all those that may be occluded by another label, establishing the side of the occlusion. (3) Move the occluded label to the opposite side to resolve the occlusion.</p>
        <p>The labels were created using a ToolTip component of Unity, provided by the Microsoft Mixed Reality Toolkit (version 2.0.0 RC2) [<xref ref-type="bibr" rid="ref24">24</xref>]. This component can rotate the label to always face the user, making it more readable (<xref rid="figure12" ref-type="fig">Figure 12</xref>).</p>
        <fig id="figure12" position="float">
          <label>Figure 12</label>
          <caption>
            <p>Occlusion between labels is resolved by pushing them apart along the circumference of the sphere, whose center is at the center of mass of the benchtop model.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig12.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Step-By-Step Guidance</title>
        <p>Our augmented reality system incorporated step-by-step guidance to help trainees follow the correct steps and trajectory during the examination. Steps were extracted from the cognitive task analysis study performed by Low-Beer et al [<xref ref-type="bibr" rid="ref25">25</xref>]. The system automatically tracked the position of the finger inside the rectal canal and checked whether the trainee had correctly followed the step. The user interface was designed to be readable and to require the least possible head movement to see all the contents (<xref rid="figure13" ref-type="fig">Figure 13</xref>), despite the narrow field of view of the HoloLens display.</p>
        <fig id="figure13" position="float">
          <label>Figure 13</label>
          <caption>
            <p>Step-by-step guidance next to the virtual benchtop model. DRE: digital rectal examination.</p>
          </caption>
          <graphic xlink:href="jmir_v22i8e18637_fig13.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Performance Recording</title>
        <p>In addition to step-by-step guidance, our augmented reality system also allowed recording and playback of the examination. The pose of the examining finger and the benchtop model were recorded so that, when played back, the virtual examining finger and the virtual benchtop model could be accurately displayed on the HoloLens. This feature is useful for trainees as they can observe experts repeatedly, or they can analyze their own performance. It can also be used by an examiner to assess student performance from different angles.</p>
      </sec>
      <sec>
        <title>Model Alignment Evaluation</title>
        <p>The accuracy of the model alignment system was evaluated by having an experienced user perform the alignment task five times. After each alignment task was performed, the positions of each landmark on both the physical and virtual benchtop models were then measured using the magnetic tracking sensor. The error of the alignment system was then calculated as the distance between a point on the virtual model and the corresponding point on the physical model.</p>
      </sec>
      <sec>
        <title>Pilot User Study</title>
        <p>An initial validation study was conducted. Clinicians (n=9; 6 men, 3 women; mean 37.8, SD 5.4 years of age) who routinely perform DRE, and medical students (n=10; 7 men, 3 women; mean 21.8, SD 2.4 years of age) were recruited. The study was approved by the National Health Service Patient Safety Agency Research Ethics Committee (09/H0701/68). Before the study, a consent form was signed by each participant. During the study, participants were asked to perform DREs on the benchtop model with the augmented reality system, wearing both the position tracking sensor and the pressure sensor under a standard surgical glove. They were also asked to pay attention to the information displayed on the HoloLens, such as the force bar and the guidance panel. Once finished, they were asked to watch the recorded performance. Afterward, participants were asked to complete an online questionnaire using a 5-point Likert scale from 1 (definitely disagree) to 5 (definitely agree) regarding the usefulness of the system (<xref ref-type="table" rid="table1">Table 1</xref>).</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Questions assessing the usefulness of the system.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="80"/>
            <col width="920"/>
            <thead>
              <tr valign="top">
                <td>Number</td>
                <td>Question</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Q1</td>
                <td>The record of the expert's performance would be useful for DRE<sup>a</sup> teaching or learning</td>
              </tr>
              <tr valign="top">
                <td>Q2</td>
                <td>The movement of the examining finger would be useful for DRE training</td>
              </tr>
              <tr valign="top">
                <td>Q3</td>
                <td>Being able to visualize the internal organs in the benchtop model could help a trainee better understand DRE</td>
              </tr>
              <tr valign="top">
                <td>Q4</td>
                <td>The real-time visualization of force applied to the model would be useful for DRE training</td>
              </tr>
              <tr valign="top">
                <td>Q5</td>
                <td>The step-by-step guidance would be useful for DRE training</td>
              </tr>
              <tr valign="top">
                <td>Q6</td>
                <td>The movement of the examining finger inside the model is realistic and accurate</td>
              </tr>
              <tr valign="top">
                <td>Q7</td>
                <td>The virtual representation of benchtop model can be aligned accurately on the physical model</td>
              </tr>
              <tr valign="top">
                <td>Q8</td>
                <td>The AR<sup>b</sup> system is easy to use and understand</td>
              </tr>
              <tr valign="top">
                <td>Q9</td>
                <td>The AR system requires minimum movement to operate</td>
              </tr>
              <tr valign="top">
                <td>Q10</td>
                <td>During the performance, you feel tired or fatigued</td>
              </tr>
              <tr valign="top">
                <td>Q11</td>
                <td>The AR system can enhance current teaching and learning of DRE</td>
              </tr>
              <tr valign="top">
                <td>Q12</td>
                <td>I would recommend the AR system to be integrated into the medical curriculum</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table1fn1">
              <p><sup>a</sup>DRE: digital rectal examination.</p>
            </fn>
            <fn id="table1fn2">
              <p><sup>b</sup>AR: augmented reality.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <p>The augmented reality prototype system ran stably and achieved a frame rate of 60 fps, which is the highest possible frame rate of the HoloLens and Unity. The alignment between the virtual and real fingers and between the virtual and benchtop models was acceptable when visually inspected. The average alignment error of each landmark was an overall average of 1.73, 2.91, and 1.91 mm in the x, y, and z directions, respectively, or a root mean square of 3.9 mm (<xref ref-type="table" rid="table2">Table 2</xref>).</p>
      <p>The usefulness of the system was assessed from the answers given to the questionnaire (<xref rid="figure14" ref-type="fig">Figure 14</xref>). Most participants would recommend this system to be integrated into medical school curriculum (mean 4.3, SD 0.8).</p>
      <table-wrap position="float" id="table2">
        <label>Table 2</label>
        <caption>
          <p>Model alignment error.</p>
        </caption>
        <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
          <col width="100"/>
          <col width="300"/>
          <col width="300"/>
          <col width="300"/>
          <thead>
            <tr valign="top">
              <td>Landmark</td>
              <td colspan="3">Alignment error (mm)</td>
            </tr>
            <tr valign="top">
              <td>
                <break/>
              </td>
              <td>x, mean (SD)</td>
              <td>y, mean (SD)</td>
              <td>z, mean (SD)</td>
            </tr>
          </thead>
          <tbody>
            <tr valign="top">
              <td>1</td>
              <td>0.46 (0.72)</td>
              <td>1.96 (2.49)</td>
              <td>0.48 (0.34)</td>
            </tr>
            <tr valign="top">
              <td>2</td>
              <td>2.38 (0.29)</td>
              <td>2.24 (0.39)</td>
              <td>2.02 (0.25)</td>
            </tr>
            <tr valign="top">
              <td>3</td>
              <td>2.04 (0.25)</td>
              <td>0.78 (0.29)</td>
              <td>4.38 (0.86)</td>
            </tr>
            <tr valign="top">
              <td>4</td>
              <td>1.02 (0.45)</td>
              <td>5.68 (1.85)</td>
              <td>3.06 (0.62)</td>
            </tr>
            <tr valign="top">
              <td>5</td>
              <td>1.18 (0.62)</td>
              <td>6.40 (0.32)</td>
              <td>0.96 (0.35)</td>
            </tr>
            <tr valign="top">
              <td>6</td>
              <td>1.90 (0.61)</td>
              <td>1.20 (0.40)</td>
              <td>1.92 (0.50)</td>
            </tr>
            <tr valign="top">
              <td>7</td>
              <td>2.98 (0.67)</td>
              <td>2.12 (0.27)</td>
              <td>0.52 (0.66)</td>
            </tr>
            <tr valign="top">
              <td>Mean</td>
              <td>1.73 (0.95)</td>
              <td>2.91 (2.37)</td>
              <td>1.91 (1.72)</td>
            </tr>
          </tbody>
        </table>
      </table-wrap>
      <fig id="figure14" position="float">
        <label>Figure 14</label>
        <caption>
          <p>Results from the Likert-scale questionnaire. The numbers and hence the length of each bar indicate the number of participants choosing that rate. DRE: digital rectal examination.</p>
        </caption>
        <graphic xlink:href="jmir_v22i8e18637_fig14.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <p>According to the results, some features were more useful than others. For example, most participants agreed that performance recording was useful (mean 4.5, SD 0.6). Regarding the real-time feedback feature, most participants agreed that the visualization of an examining finger was useful (mean 4.1, SD 1.1). The highest score was achieved for the usefulness of the visualization of internal organs (mean 4.6, SD 0.8). Step-by-step guidance was also one of the most highly rated features (mean 4.5, SD 0.6). Regarding the usability of the system, participants responded that it was easy to use (mean 4.4, SD 0.6), and they did not feel fatigued after using the system (mean 1.7, SD 0.7). The alignment of the virtual benchtop model with the real benchtop model was also rated as very accurate (mean 4.1, SD 1.1).</p>
      <p>The results from both groups, medical students and clinicians, are given in <xref rid="figure15" ref-type="fig">Figure 15</xref>. The scores obtained from the students were higher than those from the clinicians for all features (mean 0.63, SD 0.41).</p>
      <fig id="figure15" position="float">
        <label>Figure 15</label>
        <caption>
          <p>Comparison between questionnaire results from medical students and clinicians (1 = definitely disagree, 5 = definitely agree) regarding the usefulness of the augmented reality system. DRE: digital rectal examination.</p>
        </caption>
        <graphic xlink:href="jmir_v22i8e18637_fig15.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Findings</title>
        <p>The results showed that medical students and clinicians were interested in the system and recognized the value of visualization of DRE, performance recording, and step-by-step guidance in improving the learning and teaching of DRE skills. Medical students could visualize the movement of the finger and the pressure applied to the prostate. This visualization and sensor data, combined with the step-by-step guidance, allowed them to receive feedback in real time while performing DRE. Using the record-and-playback feature, students could not only rewatch examinations performed by an expert, but examiners could also review student performance from multiple viewing angles.</p>
        <p>Most users reported that the HoloLens and the tracking sensors were comfortable to wear. Even with a narrow field of view, the user interface did not appear cluttered, facilitating navigation through the menu and visualization of the performance, while at the same time facilitating step-by-step guidance on the left-hand side of the display. The model alignment was also perceived as accurate, which demonstrated that the use of the iterative closest point algorithm and the readings from the electromagnetic positioning sensor were valid.</p>
        <p>User experience was improved by displaying all information on a head-mounted augmented reality display that also allowed the overlaying of the virtual finger on the real finger. This colocation allowed trainees to avoid having to change visual focus from the benchtop model to a separate display.</p>
        <p>When comparing the feedback from medical students to the feedback from clinicians, it was observed that the mean ratings from medical students were higher than those from clinicians for all questions, with an average difference of 20%. A likely explanation is that experienced clinicians had higher expectations, and they were comparing the quality of the simulation with that of real cases because of their experience performing DREs. Having less experience, students would be more likely to require visualization and guidance, while clinicians would not because tactile feedback was adequate, given their level of experience. In addition, it may also reflect the increased acceptability and interest in this type of technology by younger generations.</p>
        <p>While the proposed system was demonstrated to be beneficial, it has some limitations. Wearing and adjusting the HoloLens properly can take some time and require assistance. In addition, operating and interacting with the HoloLens by using gestures such as pinching, also requires practice. Regarding the sensors, properly wearing the tracking and pressure sensors is crucial, and at the moment, requires the presence of an assistant to be properly placed on the examining finger. In terms of implementation, the current alignment between the virtual and the physical benchtop model was done manually through 7 landmarks; however, automatic alignment would be faster, more accurate, and more convenient. With respect to features, a scoring system with real-time feedback would be valuable for teaching and assessing DRE skills. Apart from these, the benchtop model itself was reported to be much stiffer than real patients and generally unrealistic with limited anatomical landmarks.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>This paper presents an augmented reality system for teaching and learning DRE that can be used with widely available benchtop models. It was designed to assist both trainees and teachers in learning, teaching, and practicing DRE, as well as to allow examiners to assess student technique. With colocation of the virtual and real models, students only need to focus on the benchtop model to visualize all important information. Even though the results from the user study are positive, further research is needed to evaluate the system. This would include more robust quantitative analysis with a larger number of participants and varying levels of experience. The augmented reality system could be improved by using the second version of the HoloLens, which offers a larger field of view and higher resolution, and it could also be improved by using a haptic-based instead of standard benchtop model so that a wider variety of prostate glands and abnormalities may be generated and used during the examination. Such a model would also be able to directly track finger movement and estimate pressure applied without the need for external sensors. Finally, formative and summative assessment of DRE performance will be an important component of the next version of our system.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group/>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">DRE</term>
          <def>
            <p>digital rectal examination</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">RMS</term>
          <def>
            <p>root mean square</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This work was partly funded by a Cancer Research UK Early Detection Project—Improving early detection of colorectal cancer through an innovative highly realistic simulation environment (award reference: C63674/A26161). The authors also would like to acknowledge and express their gratitude to Mr Agni Lahiri and Dr Neerav Joshi for their assistance during the pilot user study.</p>
    </ack>
    <fn-group>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Popadiuk</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Pottle</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Curran</surname>
              <given-names>V</given-names>
            </name>
          </person-group>
          <article-title>Teaching digital rectal examinations to medical students</article-title>
          <source>Academic Medicine</source>
          <year>2002</year>
          <volume>77</volume>
          <issue>11</issue>
          <fpage>1140</fpage>
          <lpage>1146</lpage>
          <pub-id pub-id-type="doi">10.1097/00001888-200211000-00017</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wolf</surname>
              <given-names>AMD</given-names>
            </name>
            <name name-style="western">
              <surname>Wender</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Etzioni</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Thompson</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>D'Amico</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Volk</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Brooks</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Dash</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Guessous</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Andrews</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>DeSantis</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>RA</given-names>
            </name>
            <collab>American Cancer Society Prostate Cancer Advisory Committee</collab>
          </person-group>
          <article-title>American Cancer Society guideline for the early detection of prostate cancer: update 2010</article-title>
          <source>CA Cancer J Clin</source>
          <year>2010</year>
          <volume>60</volume>
          <issue>2</issue>
          <fpage>70</fpage>
          <lpage>98</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.3322/caac.20066"/>
          </comment>
          <pub-id pub-id-type="doi">10.3322/caac.20066</pub-id>
          <pub-id pub-id-type="medline">20200110</pub-id>
          <pub-id pub-id-type="pii">caac.20066</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Burdea</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Patounakis</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Popescu</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Weiss</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Virtual reality-based training for the diagnosis of prostate cancer</article-title>
          <source>IEEE Trans Biomed Eng</source>
          <year>1999</year>
          <volume>46</volume>
          <issue>10</issue>
          <fpage>1253</fpage>
          <lpage>1260</lpage>
          <pub-id pub-id-type="doi">10.1109/10.790503</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Granados</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Mayer</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Norton</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Ellis</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Mobasheri</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Low-Beer</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Higham</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Kneebone</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Bello</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Haptics modelling for digital rectal examinations</article-title>
          <year>2014</year>
          <conf-name>International Symposium on Biomedical Simulation</conf-name>
          <conf-date>2014 Oct 16</conf-date>
          <conf-loc>Strasbourg, France</conf-loc>
          <publisher-name>Springer, Cham</publisher-name>
          <fpage>40</fpage>
          <lpage>49</lpage>
          <pub-id pub-id-type="doi">10.1007/978-3-319-12057-7_5</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Granados</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Cox</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Low-Beer</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Higham</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Kneebone</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Bello</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Taking the pressure off the patient - understanding digital rectal examinations on a real subject</article-title>
          <source>IEEE Trans Biomed Eng</source>
          <year>2020</year>
          <fpage>1</fpage>
          <lpage>1</lpage>
          <pub-id pub-id-type="doi">10.1109/tbme.2020.2971826</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dobson</surname>
              <given-names>HD</given-names>
            </name>
            <name name-style="western">
              <surname>Pearl</surname>
              <given-names>RK</given-names>
            </name>
            <name name-style="western">
              <surname>Orsay</surname>
              <given-names>CP</given-names>
            </name>
            <name name-style="western">
              <surname>Rasmussen</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Evenhouse</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Ai</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Blew</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Dech</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Edison</surname>
              <given-names>MI</given-names>
            </name>
            <name name-style="western">
              <surname>Silverstein</surname>
              <given-names>JC</given-names>
            </name>
            <name name-style="western">
              <surname>Abcarian</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Virtual reality</article-title>
          <source>Diseases of the Colon &#38; Rectum</source>
          <year>2003</year>
          <volume>46</volume>
          <issue>3</issue>
          <fpage>349</fpage>
          <lpage>352</lpage>
          <pub-id pub-id-type="doi">10.1007/s10350-004-6554-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rissanen</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Kume</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Kuroda</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kuroda</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Yoshimura</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Yoshihara</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Asynchronous teaching of psychomotor skills through vr annotations: evaluation in digital rectal examination</article-title>
          <source>Studies in Health Technology and Informatics</source>
          <year>2008</year>
          <volume>132</volume>
          <issue>16</issue>
          <fpage>411</fpage>
          <lpage>416</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://ebooks.iospress.nl/volumearticle/11369"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Balkissoon</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Blossfield</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Salud</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Ford</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Pugh</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>Lost in translation: unfolding medical students' misconceptions of how to perform a clinical digital rectal examination</article-title>
          <source>The American Journal of Surgery</source>
          <year>2009</year>
          <month>04</month>
          <volume>197</volume>
          <issue>4</issue>
          <fpage>525</fpage>
          <lpage>532</lpage>
          <pub-id pub-id-type="doi">10.1016/j.amjsurg.2008.11.025</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Gerling</surname>
              <given-names>GJ</given-names>
            </name>
            <name name-style="western">
              <surname>Childress</surname>
              <given-names>RM</given-names>
            </name>
            <name name-style="western">
              <surname>Martin</surname>
              <given-names>ML</given-names>
            </name>
          </person-group>
          <article-title>Quantifying palpation techniques in relation to performance in a clinical prostate exam</article-title>
          <source>IEEE Trans Inform Technol Biomed</source>
          <year>2010</year>
          <month>07</month>
          <volume>14</volume>
          <issue>4</issue>
          <fpage>1088</fpage>
          <lpage>1097</lpage>
          <pub-id pub-id-type="doi">10.1109/titb.2010.2041064</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="web">
          <article-title>Male rectal examination trainer - standard</article-title>
          <source>Limbs &#38; Things</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://limbsandthings.com/uk/products/60170/60170-male-rectal-examination-trainer-standard">https://limbsandthings.com/uk/products/60170/60170-male-rectal-examination-trainer-standard</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="web">
          <article-title>Microsoft Hololens</article-title>
          <source>Hololens Gen 1</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.wevolver.com/wevolver.staff/hololens.gen.1">https://www.wevolver.com/wevolver.staff/hololens.gen.1</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="web">
          <source>VimedixAR ultrasound simulator</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://caehealthcare.com/hololens/">https://caehealthcare.com/hololens/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="web">
          <source>CAE Lucina - Validated High-Fidelity Maternal/Fetal Training</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://caehealthcare.com/patient-simulation/lucina/">https://caehealthcare.com/patient-simulation/lucina/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="web">
          <source>Introducing HoloPatient</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.pearson.com/uk/web/pearsontq/news-and-blogs/2017/04/introducing-holopatient.html">https://www.pearson.com/uk/web/pearsontq/news-and-blogs/2017/04/introducing-holopatient.html</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="web">
          <source>SphereGen's 'Learning Heart' - Ted Dinsmore AR in ACTION</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.youtube.com/watch?v=fSz2Ce1YcQ0">https://www.youtube.com/watch?v=fSz2Ce1YcQ0</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="web">
          <source>Unity Real-Time Development Platform</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://unity.com/">https://unity.com/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="web">
          <article-title>3D Guidance driveBAY and trakSTAR</article-title>
          <source>NDI Measurement Sciences</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://www.ndigital.com/msci/products/drivebay-trakstar">http://www.ndigital.com/msci/products/drivebay-trakstar</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Granados</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Perhác</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Rosby</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Tan</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Tan</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Higham</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Thalmann</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Low-Beer</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Bello</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>See-through visualisation for training and assessing unsighted physical examinations</article-title>
          <year>2017</year>
          <conf-name>13th Workshop on Virtual Reality Interactions and Physical Simulations</conf-name>
          <conf-date>2017 Apr 23</conf-date>
          <conf-loc>Lyon, France</conf-loc>
          <fpage>85</fpage>
          <lpage>92</lpage>
          <pub-id pub-id-type="doi">10.2312/vriphys.20171087</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="web">
          <source>Vuforia Engine</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://developer.vuforia.com/">https://developer.vuforia.com/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Besl</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>McKay</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>A method for registration of 3-D shapes</article-title>
          <source>IEEE Trans. Pattern Anal. Mach. Intell</source>
          <year>1992</year>
          <month>2</month>
          <volume>14</volume>
          <issue>2</issue>
          <fpage>239</fpage>
          <lpage>256</lpage>
          <pub-id pub-id-type="doi">10.1109/34.121791</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="web">
          <source>Finger TPS - Finger And Hand Pressure Measurement</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://pressureprofile.com/body-pressure-mapping/finger-tps">https://pressureprofile.com/body-pressure-mapping/finger-tps</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tatzgern</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Kalkofen</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Grasset</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Schmalstieg</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Hedgehog labeling: view management techniques for external labels in 3D space</article-title>
          <year>2014</year>
          <conf-name>IEEE Virtual Reality (VR)</conf-name>
          <conf-date>2014 Mar 29</conf-date>
          <conf-loc>Minneapolis, USA</conf-loc>
          <fpage>27</fpage>
          <lpage>32</lpage>
          <pub-id pub-id-type="doi">10.1109/VR.2014.6802046</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hartmann</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Götzelmann</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Ali</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Strothotte</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Metrics for functional and aesthetic label layouts</article-title>
          <year>2005</year>
          <conf-name>International Symposium on Smart Graphics</conf-name>
          <conf-date>2005 Aug 22</conf-date>
          <conf-loc>Frauenwörth, Germany</conf-loc>
          <fpage>115</fpage>
          <lpage>126</lpage>
          <pub-id pub-id-type="doi">10.1007/11536482_10</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="web">
          <source>Mixed Reality Toolkit (MRTK)</source>
          <access-date>2020-07-17</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://microsoft.github.io/MixedRealityToolkit-Unity/README.html">https://microsoft.github.io/MixedRealityToolkit-Unity/README.html</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Low-Beer</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Kinnison</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Baillie</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bello</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Kneebone</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Higham</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Hidden practice revealed: using task analysis and novel simulator design to evaluate the teaching of digital rectal examination</article-title>
          <source>The American Journal of Surgery</source>
          <year>2011</year>
          <month>01</month>
          <volume>201</volume>
          <issue>1</issue>
          <fpage>46</fpage>
          <lpage>53</lpage>
          <pub-id pub-id-type="doi">10.1016/j.amjsurg.2010.09.004</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
