<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="review-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v21i5e11925</article-id>
      <article-id pub-id-type="pmid">31066679</article-id>
      <article-id pub-id-type="doi">10.2196/11925</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Review</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Review</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Use of Commercial Off-The-Shelf Devices for the Detection of Manual Gestures in Surgery: Systematic Literature Review</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Eysenbach</surname>
            <given-names>Gunther</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Kowalewski</surname>
            <given-names>Karl</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Sánchez Margallo</surname>
            <given-names>Juan Alberto</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Davies</surname>
            <given-names>Benjamin</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="author" id="contrib1" equal-contrib="yes">
          <name name-style="western">
            <surname>Alvarez-Lopez</surname>
            <given-names>Fernando</given-names>
          </name>
          <degrees>MD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
        </contrib>
        <contrib contrib-type="author" id="contrib2" equal-contrib="yes">
          <name name-style="western">
            <surname>Maina</surname>
            <given-names>Marcelo Fabián</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">http://orcid.org/0000-0002-1889-1097</ext-link>
        </contrib>
        <contrib contrib-type="author" id="contrib3" corresp="yes" equal-contrib="yes">
          <name name-style="western">
            <surname>Saigí-Rubió</surname>
            <given-names>Francesc</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>Faculty of Health Sciences</institution>
            <institution>Universitat Oberta de Catalunya</institution>
            <addr-line>Avinguda del Tibidabo 39-43</addr-line>
            <addr-line>Barcelona, 08035</addr-line>
            <country>Spain</country>
            <phone>34 933263622</phone>
            <email>fsaigi@uoc.edu</email>
          </address>
          <ext-link ext-link-type="orcid">http://orcid.org/0000-0001-9616-1551</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
      <label>1</label>
      <institution>Faculty of Health Sciences</institution>
      <institution>Universitat Oberta de Catalunya</institution>  
      <addr-line>Barcelona</addr-line>
      <country>Spain</country></aff>
      <aff id="aff2">
      <label>2</label>
      <institution>Faculty of Health Sciences</institution>
      <institution>Universidad de Manizales</institution>  
      <addr-line>Caldas</addr-line>
      <country>Colombia</country></aff>
      <aff id="aff3">
      <label>3</label>
      <institution>Faculty of Psychology and Education Sciences</institution>
      <institution>Universitat Oberta de Catalunya</institution>  
      <addr-line>Barcelona</addr-line>
      <country>Spain</country></aff>
      <author-notes>
        <corresp>Corresponding Author: Francesc Saigí-Rubió 
        <email>fsaigi@uoc.edu</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <month>05</month>
        <year>2019</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>03</day>
        <month>05</month>
        <year>2019</year>
      </pub-date>
      <volume>21</volume>
      <issue>5</issue>
      <elocation-id>e11925</elocation-id>
      <!--history from ojs - api-xml-->
      <history>
        <date date-type="received">
          <day>12</day>
          <month>08</month>
          <year>2018</year>
        </date>
        <date date-type="rev-request">
          <day>13</day>
          <month>10</month>
          <year>2018</year>
        </date>
        <date date-type="rev-recd">
          <day>04</day>
          <month>01</month>
          <year>2019</year>
        </date>
        <date date-type="accepted">
          <day>25</day>
          <month>01</month>
          <year>2019</year>
        </date>
      </history>
      <copyright-statement>©Fernando Alvarez-Lopez, Marcelo Fabián Maina, Francesc Saigí-Rubió. Originally published in the Journal of Medical Internet Research (http://www.jmir.org), 03.05.2019.</copyright-statement>
      <copyright-year>2019</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research, is properly cited. The complete bibliographic information, a link to the original publication on http://www.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://www.jmir.org/2019/5/e11925/" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>The increasingly pervasive presence of technology in the operating room raises the need to study the interaction between the surgeon and computer system. A new generation of tools known as commercial off-the-shelf (COTS) devices enabling touchless gesture–based human-computer interaction is currently being explored as a solution in surgical environments.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>The aim of this systematic literature review was to provide an account of the state of the art of COTS devices in the detection of manual gestures in surgery and to identify their use as a simulation tool for motor skills teaching in minimally invasive surgery (MIS).</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>For this systematic literature review, a search was conducted in PubMed, Excerpta Medica dataBASE, ScienceDirect, Espacenet, OpenGrey, and the Institute of Electrical and Electronics Engineers databases. Articles published between January 2000 and December 2017 on the use of COTS devices for gesture detection in surgical environments and in simulation for surgical skills learning in MIS were evaluated and selected.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>A total of 3180 studies were identified, 86 of which met the search selection criteria. Microsoft Kinect (Microsoft Corp) and the Leap Motion Controller (Leap Motion Inc) were the most widely used COTS devices. The most common intervention was image manipulation in surgical and interventional radiology environments, followed by interaction with virtual reality environments for educational or interventional purposes. The possibility of using this technology to develop portable low-cost simulators for skills learning in MIS was also examined. As most of the articles identified in this systematic review were proof-of-concept or prototype user testing and feasibility testing studies, we concluded that the field was still in the exploratory phase in areas requiring touchless manipulation within environments and settings that must adhere to asepsis and antisepsis protocols, such as angiography suites and operating rooms.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>COTS devices applied to hand and instrument gesture–based interfaces in the field of simulation for skills learning and training in MIS could open up a promising field to achieve ubiquitous training and presurgical warm up.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>minimally invasive surgery</kwd>
        <kwd>user-computer interface</kwd>
        <kwd>operating room</kwd>
        <kwd>education, medical</kwd>
        <kwd>computer-assisted surgery</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <sec>
        <title>Background</title>
        <p>The increasingly pervasive presence of technology in the operating room raises the need to study the interaction between the surgeon and computer system. In sterile environments, using the hand to operate a mouse, keyboard, or touchscreen is unacceptable as it alters the normal pace of surgery and breaks asepsis and antisepsis protocols [<xref ref-type="bibr" rid="ref1">1</xref>-<xref ref-type="bibr" rid="ref6">6</xref>]. Using a physical barrier between the surgeon’s gloves and the interaction device [<xref ref-type="bibr" rid="ref7">7</xref>], or the foot for manipulation, are not practical solutions either, as they do not allow fine interaction and carry risks of contamination [<xref ref-type="bibr" rid="ref8">8</xref>]. Moreover, using a person to manipulate images in accordance with the surgeon’s verbal instructions has proven difficult and is prone to giving rise to misunderstandings when the visualization of specific areas of the image are requested [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>].</p>
        <p>Early solutions to circumvent any contact between the surgeon and computer were based on voice recognition Automated Endoscopic System for Optimal Positioning (AESOP) and HERMES (Stryker Europe) [<xref ref-type="bibr" rid="ref11">11</xref>,<xref ref-type="bibr" rid="ref12">12</xref>], but these systems were impractical as they were difficult to use when performing complex tasks [<xref ref-type="bibr" rid="ref13">13</xref>]. Natural user interfaces were first developed in the 1990s to enable interaction with the computer through natural human movements to manipulate radiological images in sterile surgical environments [<xref ref-type="bibr" rid="ref14">14</xref>]. Gesture-based interfaces were another variant [<xref ref-type="bibr" rid="ref15">15</xref>]. These enabled touchless manipulations to be performed and held great promise as a viable solution in the operating room and autopsy suites [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref16">16</xref>-<xref ref-type="bibr" rid="ref19">19</xref>]. However, they could not be employed in sterile environments as they required some contact when gloves or position sensors were used [<xref ref-type="bibr" rid="ref20">20</xref>-<xref ref-type="bibr" rid="ref24">24</xref>].</p>
        <p>Early attempts to use touchless gestures in minimally invasive surgery (MIS) involved hand and facial gestures [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref25">25</xref>]. Gesture recognition systems with Web and video cameras were later described [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref27">27</xref>] using the time-of-flight principle [<xref ref-type="bibr" rid="ref28">28</xref>] and achieving interaction with the OsiriX viewer [<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref29">29</xref>]. However, these systems were very expensive and inaccurate and required calibration and a complex setup, making them impractical for use in the operating room [<xref ref-type="bibr" rid="ref30">30</xref>].</p>
        <p>A new generation of tools known as commercial off-the-shelf (COTS) devices enabling touchless gesture–based human-computer interaction is currently being explored as a solution in surgical environments. The term COTS refers to a device that can be taken from a shelf, that is, sold over the counter. In addition to being low-cost, wireless, and ergonomic, they facilitate real-time interactivity and allow the user to point to and manipulate objects with 6 degrees of freedom [<xref ref-type="bibr" rid="ref31">31</xref>]. Hansen et al described the use of the Wii Remote (Nintendo) for the intraoperative modification of resection planes in liver surgery [<xref ref-type="bibr" rid="ref32">32</xref>], whereas Gallo et al used it for pointing to and manipulating 3-dimensional (3D) medical data in a number of ways [<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref33">33</xref>-<xref ref-type="bibr" rid="ref36">36</xref>]. However, intraoperative manipulation of the device required it to be wrapped in a sterile bag, thus eliminating the concept of contactless. In November 2010, the Microsoft Kinect (MK) 3D depth camera system (Microsoft Corp) was launched as a device for the Xbox 360 games console. The first descriptions of MK for medical use were in relation to physical and cognitive rehabilitation [<xref ref-type="bibr" rid="ref37">37</xref>]. Subsequent experiences in this field showed that additional studies were required on issues such as effectiveness, commitment, and usability [<xref ref-type="bibr" rid="ref38">38</xref>-<xref ref-type="bibr" rid="ref40">40</xref>]. Its use in an operating room was first reported in 2011, at Sunnybrook Hospital in Toronto, when it was used to view magnetic resonance imaging and computed tomography scans, eventually giving rise to the GestSure system [<xref ref-type="bibr" rid="ref13">13</xref>]. 
In 2012, the Leap Motion Controller (LMC; Leap Motion Inc) was launched, and in July 2013, the Myo armband (Thalmic Labs) was launched.</p>
        <p>Construct validity [<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>], concurrent validity [<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref44">44</xref>], and predictive validity [<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref46">46</xref>] studies, as well as systematic reviews [<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref48">48</xref>], have shown that simulation in virtual reality environments is an effective tool for motor skills learning in MIS. However, the high cost of virtual reality and augmented reality simulators calls for the development of new, portable low-cost solutions enabling ubiquitous learning. New COTS technologies that allow hand gestures and instrument movements to be detected open up an interesting field of exploration for the development and validation of new simulation models in virtual environments. One of the objectives of this systematic review was to recognize the existence of developments in this area.</p>
      </sec>
      <sec>
        <title>Objectives</title>
        <p>The aim of this systematic review was to provide an account of the state of the art of COTS devices in the detection of manual gestures in surgery and to identify their use as a simulation tool for motor skills teaching in MIS.</p>
      </sec>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Article Retrieval</title>
        <p>A search was conducted in the electronic databases PubMed, Excerpta Medica database (EMBASE), ScienceDirect, Espacenet, OpenGrey, and the Institute of Electrical and Electronics Engineers (IEEE) for articles published between January 2000 and December 2017, using combinations of the following Medical Subject Headings (MeSH) terms: <italic>surgery</italic>, <italic>computer simulation</italic>, <italic>simulation training</italic>, <italic>laparoscopy</italic>, <italic>minimally invasive surgical procedures</italic>, <italic>robotic surgical procedures</italic>, and <italic>virtual reality</italic>. The following were used as free terms: <italic>commercial off-the-shelf</italic>, <italic>COTS</italic>, <italic>surgical education</italic>, <italic>surgical simulation</italic>, <italic>Wii</italic>, <italic>Microsoft Kinect</italic>, <italic>Xbox Kinect</italic>, <italic>Leap Motion</italic>, <italic>Leap Motion Controller</italic>, <italic>Myo armband</italic>, and <italic>gesture control</italic>. The search strategy used a combination of MeSH terms and free terms. Boolean operators (AND and OR) were used to expand, exclude, or join keywords in the search. The devised strategy was applied first to PubMed and then to the remaining databases.</p>
        <p>The search was limited to English-language publications and was complemented using the snowballing technique to identify relevant articles in the references of articles returned by our search [<xref ref-type="bibr" rid="ref49">49</xref>]. A manual search was also conducted on the indices of the following publications: <italic>Surgical Endoscopy</italic>, <italic>Surgical Innovation</italic>, <italic>Minimally Invasive Therapy and Allied Technologies</italic>, the <italic>Journal of Medical Internet Research</italic>, and the <italic>Journal of Surgical Education</italic>. The snowballing search and the manual reviews enabled the retrieval of conference proceedings, letters to the editor, and simple concept descriptions. A MeaSurement Tool to Assess systematic Reviews (AMSTAR) [<xref ref-type="bibr" rid="ref50">50</xref>] and Preferred Reporting Items for Systematic Reviews and Meta-Analysis (PRISMA) [<xref ref-type="bibr" rid="ref51">51</xref>] checklists were used to ensure the quality of the review. In total, 3 authors assessed the risk of bias. Disagreement on bias assessment and the interpretation of results was resolved by consensus discussions.</p>
      </sec>
      <sec>
        <title>Study Selection</title>
        <p>A total of 3180 studies were identified, and the abstracts were reviewed to determine whether they met the inclusion and exclusion criteria. The inclusion criteria were (1) original research articles, (2) proof-of-concept or prototype user testing and feasibility testing studies, (3) studies conducted in surgical environments (preoperative, intraoperative, or postoperative), and (4) studies carried out in real or simulated surgical settings. The exclusion criteria were (1) studies on COTS devices requiring hand contact, (2) studies conducted in nonsurgical clinical environments, and (3) studies on the technical description of devices that did not include criteria of clinical usability, feasibility, or acceptance as an outcome. Studies on COTS devices requiring hand contact (ie, Wii) were excluded from the analysis. After the first review of the titles and abstracts, 361 studies were selected, 220 of which corresponded to the Wii device and were therefore discarded. Of the 141 remaining articles, 55 were duplicate references. After reading the full texts of these studies, 86 were deemed to have met the search selection criteria. The search and selection processes are summarized in <xref ref-type="fig" rid="figure1">Figure 1</xref>.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>Flow diagram of studies through the review.</p>
          </caption>
          <graphic xlink:href="jmir_v21i5e11925_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>We used a standardized form for data extraction, which included the following items: study, device on which the study was conducted, year of publication, aim, type of study, intervention, metrics, sample, and results and conclusions; clinical areas in which the study was conducted and types of surgical intervention (<xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table4">4</xref>) (see <xref ref-type="app" rid="app1">Multimedia Appendices 1</xref>-<xref ref-type="app" rid="app3">3</xref> for the full <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>) and use of gesture-based COTS devices in surgery (<xref ref-type="table" rid="table5">Table 5</xref>). In total, 2 authors (FAL and MM) screened all the articles individually. Discrepancies were always resolved through discussion with the senior author (FSR) whenever necessary. All the data were analyzed qualitatively and quantitatively.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <p>Of the 86 articles identified, 43 (50%) were on MK, 31 (36%) were on the LMC, 2 compared MK with the LMC [<xref ref-type="bibr" rid="ref77">77</xref>,<xref ref-type="bibr" rid="ref113">113</xref>], 1 compared the LMC with the Myo armband [<xref ref-type="bibr" rid="ref58">58</xref>], 1 compared MK with the LMC and the Myo armband [<xref ref-type="bibr" rid="ref52">52</xref>], 6 were on web, video, or commercial cameras (7%), and 2 reviewed gesture interaction in general [<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref65">65</xref>]. The data and detailed information on the studies reviewed are shown in <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref> (see <xref ref-type="app" rid="app1">Multimedia Appendices 1</xref>-<xref ref-type="app" rid="app3">3</xref> for the full <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>). The results are organized by the type of COTS device used (<xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>, see <xref ref-type="app" rid="app1">Multimedia Appendices 1</xref>-<xref ref-type="app" rid="app3">3</xref> for the full <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>), by the type of surgical specialties in which COTS devices were used (<xref ref-type="table" rid="table4">Table 4</xref>), and by the type of use made of COTS devices in surgery, including simulation for motor skills learning (<xref ref-type="table" rid="table5">Table 5</xref>).</p>
      <table-wrap position="float" id="table1">
        <label>Table 1</label>
        <caption>
          <p>Summary of included studies evaluating Microsoft Kinect.</p>
        </caption>
        <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
          <col width="80"/>
          <col width="220"/>
          <col width="150"/>
          <col width="160"/>
          <col width="160"/>
          <col width="230"/>
          <thead>
            <tr valign="top">
              <td>Study</td>
              <td>Aim</td>
              <td>Type of study</td>
              <td>Intervention</td>
              <td>Sample</td>
              <td>Results/Conclusions</td>
            </tr>
          </thead>
          <tbody>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref17">17</xref>]</td>
              <td>To describe a system for the interactive exploration of medical images through a gesture-controlled interface using MK<sup>a</sup>.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of CT<sup>b</sup>, MRI<sup>c</sup> and positron emission tomography images.</td>
              <td>Not described.</td>
              <td>As the interface does not require direct contact or calibration, it is suitable for use in the operating room.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref99">99</xref>]</td>
              <td>To explore the potential simplifications derived from using 3D<sup>d</sup> sensors in medical augmented reality applications by designing a low-cost system.</td>
              <td>Proof-of-concept.</td>
              <td>Augmented reality in medicine.</td>
              <td>Not described.</td>
              <td>The concept is feasible but the whole process is still too time-consuming to be executed in real time.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref101">101</xref>]</td>
              <td>To present an augmented reality magic mirror for anatomy teaching.</td>
              <td>Proof-of-concept.</td>
              <td>Augmented reality in medicine. Anatomy education.</td>
              <td>A hospital and a school.</td>
              <td>The system can be used for educational purposes, to improve communication between doctor and patients. A possible use for anatomy teaching in surgery is not mentioned.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref5">5</xref>]</td>
              <td>To evaluate the response time and usability (gestures and voice commands) compared with mouse and keyboard controls.</td>
              <td>Prototype user testing and feasibility testing.</td>
              <td>Manipulation of CT images.</td>
              <td>2 radiologists and 8 forensic pathologists who recreated 12 images.</td>
              <td>Users took 1.4 times longer to recreate an image with gesture control and rated the system 3.4 out of 5 for ease of use in comparison with the keyboard and mouse. The voice recognition system did not work properly.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref84">84</xref>]</td>
              <td>To develop a system to allow the surgeon to interact with the standard PACS system during sterile surgical management of orthopedic patients.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of radiological images in orthopedics.</td>
              <td>Not described.</td>
              <td>This is the first example of this technology being used to control digital X-rays in clinical practice.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref83">83</xref>]</td>
              <td>To present a sterile method for the surgeon to manipulate images using touchless freehand gestures.</td>
              <td>Experiment.</td>
              <td>Manipulation of MRI images.</td>
              <td>9 veterinary surgeons. 22 students.</td>
              <td>The hypothesis that contextual information integrated with hand trajectory gesture information can significantly improve the overall recognition system performance was validated. The recognition accuracy was 98.7%.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref76">76</xref>]</td>
              <td>To evaluate an MK-based interaction system for manipulating imaging data using ‘Magic Lens visualization.’</td>
              <td>Proof-of-concept in the operating room.</td>
              <td>Manipulation of radiological images.</td>
              <td>A laryngoplasty.</td>
              <td>The surgeon can manipulate the preoperative information with the intraoperative video and the simulations to correctly place the implant.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref79">79</xref>]</td>
              <td>To compare the accuracy and speed of interaction of MK with that of a mouse. To study the performance of the interaction methods in rotation tasks and localization of internal structures in a 3D dataset.</td>
              <td>User testing.</td>
              <td>Manipulation of radiological images.</td>
              <td>15 users.</td>
              <td>The gesture-based interface outperformed the traditional mouse with respect to time and accuracy in the orientation and rotation task. The mouse was superior in terms of accuracy of localization of internal structures. However, the gesture-based interface was found to have the fastest target localization time.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref74">74</xref>]</td>
              <td>To develop a user-friendly touchless system for controlling the presentation of medical images based on hand gesture recognition in the operating room.</td>
              <td>Proof-of-concept in the operating room.</td>
              <td>Manipulation of radiological images in orthopedic surgery.</td>
              <td>Not described.</td>
              <td>The system does not require calibration and was adapted to the surgical environment following the principles of asepsis/antisepsis.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref30">30</xref>]</td>
              <td>To present a touchless gesture interface that allows the surgeon to control medical images using hand gestures.</td>
              <td>Proof-of-concept and prototype feasibility testing.</td>
              <td>Manipulation of CT images.</td>
              <td>Enucleation of 4 tumors in 3 urology patients.</td>
              <td>First description in the literature of a gesture user interface using MK in the operating room in in-vivo surgery, showing that it is an efficient and low-cost solution.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref100">100</xref>]</td>
              <td>To develop a low-cost augmented reality interface projected onto a mannequin simulator.</td>
              <td>Proof-of-concept.</td>
              <td>Augmented reality for education in medicine.</td>
              <td>A physical simulator, video projector, Wii Remote and MK.</td>
              <td>The manipulations obtained using MK were similar to those described with the Wii.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref67">67</xref>]</td>
              <td>To develop a version of a gesture-based system for controlling images.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of MRI images.</td>
              <td>Resection of a glioma.</td>
              <td>Except for the scanning movement, each movement was recognized with great accuracy. The algorithm can be installed in the clinical area.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref128">128</xref>]</td>
              <td>To use MK to operate an automated operating-room light system.</td>
              <td>Prototype user testing.</td>
              <td>Manipulation of operating room lights.</td>
              <td>18 volunteers.</td>
              <td>The gestures were easy to learn and the movement of the light beam was sufficiently precise.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref102">102</xref>]</td>
              <td>To create a touchless head tracking system for an immersive virtual operating room.</td>
              <td>Proof-of-concept.</td>
              <td>Virtual reality for simulation and education in surgery.</td>
              <td>A 3D virtual operating room with a virtual operating table.</td>
              <td>Using MK, it was possible to implement a very accurate interactive tracking system regardless of the complexity of the virtual reality system.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref85">85</xref>]</td>
              <td>To present a new prototype that allows the user to control the OsiriX system with finger gestures using a low-cost depth camera.</td>
              <td>Proof-of-concept and prototype feasibility testing.</td>
              <td>Manipulation of CT images.</td>
              <td>4 forensic pathologists, 1 radiologist and 1 engineer.</td>
              <td>On average, 4.5 min were required to learn to use the system. <break/>Participants rated the intuitiveness of the gestures with 3.8 out of 5 and control of the images with 3.8 out of 5. The low cost of the system makes it affordable for any potential user.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref104">104</xref>]</td>
              <td>To present a new immersive surgical training system.</td>
              <td>Proof-of-concept and prototype fidelity testing.</td>
              <td>Virtual reality for education in surgery.</td>
              <td>Cholecystectomy training on animal tissue blocks.</td>
              <td>Initial feedback from the residents showed that the system is much more effective than the conventional videotaped system.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref60">60</xref>]</td>
              <td>To test a speech and gesture-controlled interventional radiology system.</td>
              <td>User testing.</td>
              <td>Manipulation of CT and angiography images.</td>
              <td>10 radiology residents used commands under different lighting conditions during 18 angiographies and 10 CT-guided punctures.</td>
              <td>93% of commands were recognized successfully. Speech commands were less prone to errors than gesture commands. 60% of participants would use the application in their routine clinical practice.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref86">86</xref>]</td>
              <td>To develop an image operation system for image manipulation using a motion sensor.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of angiographic images.</td>
              <td>Not described.</td>
              <td>The system can be implemented as a useful tool in angiography for controlling image viewing using gestures in the operating room.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref19">19</xref>]</td>
              <td>The working hypothesis is that contextual information such as the focus of attention, integrated with gestural information, can significantly improve overall system recognition performance compared with interfaces relying on gesture recognition alone.</td>
              <td>Ethnographic study. Experiment. Survey.</td>
              <td>Manipulation of MRI images.</td>
              <td>10 veterinary surgeons. 20 volunteers.</td>
              <td>The surgeon’s intention to perform a gesture can be accurately recognized by observing environmental cues (context). The hypothesis was validated by a drop in the false positive rate of gesture recognition from 20.76% to 2.33%. A significant rate of reduction of the mean task completion time indicated that the user operates the interface more efficiently with experience. The tracking algorithm occasionally failed in the presence of several people in the camera’s field of view.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref96">96</xref>]</td>
              <td>To examine the functionality and usability of MK to complete the visualization of 3D anatomical images.</td>
              <td>User testing. Survey.</td>
              <td>Manipulation of anatomical images.</td>
              <td>32 participants: Medical students, professors and anatomy laboratory staff.</td>
              <td>MK users reached accuracy levels almost identical to those who used a mouse, and spent less time on performing the same tasks. MK showed potential as a device for interaction with medical images.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref103">103</xref>]</td>
              <td>To examine usability for navigating through 3D medical images using MK compared with a traditional mouse.</td>
              <td>User testing. Survey.</td>
              <td>Manipulation of anatomical images. Education.</td>
              <td>17 veterinary students.</td>
              <td>Improvements should be made to MK before it can be implemented as a device for medical use. The preferred method was the mouse. MK has the potential to reduce time on the task.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref13">13</xref>]</td>
              <td>To develop a prototype and to examine the feasibility of this new device to help bridge the sterility barrier and eliminate the time and space gap that exists between image review and visual correlation with real-time operative field anatomy.</td>
              <td>Proof-of-concept and prototype feasibility testing.</td>
              <td>Manipulation of CT and MRI images.</td>
              <td>2 MIS<sup>e</sup> procedures and 4 open procedures performed by a surgeon.</td>
              <td>The system worked well in a wide range of lighting conditions and procedures. There was an increase in the use of intraoperative image consultation. The gesture library was intuitive and easy to learn. Gestures were mastered within 10 min.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref61">61</xref>]</td>
              <td>To investigate a solution for manipulating medical images using MK.</td>
              <td>Proof-of-concept and prototype feasibility testing.</td>
              <td>Manipulation of CT images.</td>
              <td>29 radiologists (diagnostic and interventional).</td>
              <td>The potential of the device to enhance image-guided treatment in an interventional radiology suite while maintaining a sterile surgical field was demonstrated. 69% of those surveyed believed that the device could be useful in the interventional radiology field.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref112">112</xref>]</td>
              <td>To investigate the need for posture and position training during bronchoscopy using a tool called ETrack.</td>
              <td>Pilot study.</td>
              <td>Analysis of the operator’s movements during a bronchoscopy. Education.</td>
              <td>Not described.</td>
              <td>The results highlight the importance of posture during bronchoscopy and the need to implement a training module for the simulator.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref71">71</xref>]</td>
              <td>To evaluate a new touchless, portable, low-cost 3D measurement system for objective breast assessment.</td>
              <td>Concurrent validation study.</td>
              <td>Calculation of breast implant volumes.</td>
              <td>9 silicone implants of known volumes.</td>
              <td>The implant volumes were calculated with an error margin of 10%. Reproducibility was satisfactory. The system was validated for clinical use.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref106">106</xref>]</td>
              <td>To describe a gesture-controlled 3D teaching tool in which temporal bone anatomy is manipulated without using a mouse or keyboard. To provide a teaching tool for patient-specific anatomy.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of anatomical images. Education.</td>
              <td>0.15 mm slice thickness cadaveric temporal bone images.</td>
              <td>The interactive 3D model developed seems promising as an educational tool.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref62">62</xref>]</td>
              <td>To develop hand recognition software based on MK, linked to an interventional CT, to manipulate images.</td>
              <td>Feasibility testing.</td>
              <td>Manipulation of CT images in surgery.</td>
              <td>10 interventional radiology procedures. 1 operator.</td>
              <td>Tested on 10 procedures, feasibility was 100%. The system also allowed information to be obtained without using the CT system interface or a third party, and without the loss of operator sterility.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref131">131</xref>]</td>
              <td>To present a novel method for training intentional and nonintentional gesture recognition.</td>
              <td>Experiment.</td>
              <td>Performance of a simulated brain biopsy on a mannequin assisted by images manipulated using gestures.</td>
              <td>19 subjects.</td>
              <td>Continuous gesture recognition was successful 92.26% of the time with a reliability of 89.97%. Significant improvements in task completion time were obtained through the context integration effect.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref113">113</xref>]</td>
              <td>To evaluate 2 contactless hand tracking systems, the LMC<sup>f</sup> and MK, for their potential to control surgical robots.</td>
              <td>Experiment.</td>
              <td>Manipulation of robots in surgery.</td>
              <td>4 trained surgeons.</td>
              <td>Neither system has the high level of accuracy and robustness that would be required for controlling medical robots.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref107">107</xref>]</td>
              <td>To use a projector for visualization and to provide intuitive means for direct interaction with the information projected onto the surgical surface, using MK to capture the interaction zone and the surgeon’s actions on a deformable surface.</td>
              <td>Proof-of-concept.</td>
              <td>Augmented reality in surgery.</td>
              <td>Not described.</td>
              <td>The system eliminates the need for the surgeon to look at a location other than the surgical field. It therefore removes distractions and enhances his or her performance. It not only provides the surgeon with medical data during the intervention, but also allows interaction with such information by using gestures.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref10">10</xref>]</td>
              <td>To present an ethnographic study of a system based on MK developed to allow touchless control of medical images during vascular surgery. The study aims to go beyond demonstrating technical feasibility in order to understand the collaborative practices that emerge from its use in this context.</td>
              <td>Ethnographic study.</td>
              <td>Manipulation of radiological images.</td>
              <td>Endovascular suite of a large hospital.</td>
              <td>With touchless interaction, the visual resources were embedded and made meaningful in the collaborative practices of surgery. The importance of direct and dynamic control of the images by the clinicians in the context of talks and in the context of other artefact use is discussed.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref130">130</xref>]</td>
              <td>To evaluate a system for manipulating an operating table using gestures.</td>
              <td>Prototype user testing.</td>
              <td>Manipulation of an operating table.</td>
              <td>15 participants.</td>
              <td>Major problems were encountered during gesture recognition and with obstruction by other people in the interaction area due to the size and layout of the operating room. The system cannot yet be integrated into a surgical environment.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref110">110</xref>]</td>
              <td>To study the technical skills of colonoscopists using MK for motion analysis to develop a tool to guide colonoscopy education and to select discriminative motion patterns.</td>
              <td>Construct validity study.</td>
              <td>Analysis of the movements of the operator during a colonoscopy.</td>
              <td>10 experienced and 11 novice endoscopists.</td>
              <td>Certain types of metric can be used to discriminate between experienced and novice operators.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref72">72</xref>]</td>
              <td>To develop a 3D surface imaging system and to assess the accuracy and repeatability on a female mannequin.</td>
              <td>Interrater reliability study.</td>
              <td>Measurement of the surface distances of the breast on a mannequin.</td>
              <td>A female mannequin.</td>
              <td>MK seems to be a useful and feasible system for capturing 3D images of the breast. There was agreement between the measurements obtained by the system and those taken manually with a measuring tape.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref105">105</xref>]</td>
              <td>To present a new surgical training system.</td>
              <td>Proof-of-concept.</td>
              <td>Real-time immersive 3D surgical training. Education.</td>
              <td>Not described.</td>
              <td>Preliminary experiments show that this immersive training system is portable, effective and reliable.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref68">68</xref>]</td>
              <td>To present the development and clinical testing of a device that enables intraoperative control of images with hand gestures during neurosurgical procedures.</td>
              <td>Proof-of-concept. Initial clinical testing.</td>
              <td>Manipulation of MRI images.</td>
              <td>30 neurosurgical operations.</td>
              <td>OPECT demonstrated high effectiveness, simplicity of use and precise recognition of the individual user profile. In all cases, surgeons were satisfied with the performance of the device.</td>
            </tr>
            <tr valign="top">
              <!-- NOTE(review): [68] is also cited at the OPECT neurosurgery row above, yet this row describes a different (bronchoscopy motion-analysis) study — the rid here is likely wrong; verify against the reference list. -->
              <td>[<xref ref-type="bibr" rid="ref68">68</xref>]</td>
              <td>To test whether an automatic motion analysis system could be used to explore if there is a correlation in scope movements and the level of experience of the surgeon performing the bronchoscopy.</td>
              <td>Construct validity study. Prospective, comparative study.</td>
              <td>Analysis of the operator’s movements during a bronchoscopy. Education.</td>
              <td>11 novice, 9 intermediate and 9 experienced bronchoscopy operators performed 3 procedures each on a bronchoscopy simulator.</td>
              <td>The motion analysis system could discriminate between different levels of experience. Automatic feedback on correct movements during self-directed training on simulators might help new bronchoscopists learn how to handle the bronchoscope like an expert.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref77">77</xref>]</td>
              <td>To compare 2 commercial motion sensors (MK and the LMC) to manipulate CT images, in terms of their utility, usability, speed, accuracy and user acceptance.</td>
              <td>Two-strand sequential observational study. Qualitative and quantitative descriptive field study using a semi-structured questionnaire.</td>
              <td>Manipulation of CT images.</td>
              <td>42 participants: radiologists, surgeons and interventional radiologists.</td>
              <td>Marginal to average acceptability of the 2 devices. MK was found to be more useful and easier to use, but the LMC was more accurate. Further research is required to establish the design specifications, installation guidelines and user training requirements to ensure successful implementation in clinical areas.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref57">57</xref>]</td>
              <td>To develop an integrated and comprehensive operating room information system compatible with HL7 and DICOM (MediNav). A natural user interface is designed specifically for operating rooms based on MK.</td>
              <td>Prototype user testing.</td>
              <td>Users tested the application’s various modules.</td>
              <td>A prototype system is tested in a live operating room at an Iranian teaching hospital. 30 general surgeries.</td>
              <td>The results of usability tests are promising, and indicate that integration of these systems into a complete solution is the key. Touchless natural user interfaces can help to collect and visualize medical information in a comprehensive manner.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref75">75</xref>]</td>
              <td>To propose a novel system to visualize a surgical scene in augmented reality using the different sources of information provided by a C-arm and MK.</td>
              <td>Prototype user testing.</td>
              <td>Augmented reality in orthopedic surgery.</td>
              <td>Simulations of 12 orthopedic procedures. 5 participating clinicians, 3 experienced surgeons, 2 fourth-year medical students.</td>
              <td>The system showed promising results with respect to better surgical scene understanding and improved depth perception using augmented reality in simulated orthopedic surgery.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref114">114</xref>]</td>
              <td>To explore 3D perception technologies in the operating room.</td>
              <td>Ethnographic study. Prototype testing.</td>
              <td>Detection of the interaction between operating staff and the robot.</td>
              <td>Not described.</td>
              <td>The paper described a supervision system for the operating room that enables intention tracking. The system had low latency, good registration accuracy and high tracking reliability, which make it useful for workflow monitoring, tracking and avoiding collisions between medical robots and operating room staff.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref125">125</xref>]</td>
              <td>To use MK and color markers to track the position of MIS instruments in real time.</td>
              <td>Comparative study between MK and the SinaSim trainer.</td>
              <td>Movement of the instrument to position its tip in 81 holes of a Plexiglas plate on 5 occasions.</td>
              <td>1 user.</td>
              <td>Although the new method had inferior accuracy compared with mechanical sensors, its low cost and portability make it a candidate for replacing traditional tracking methods.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref80">80</xref>]</td>
              <td>To compare 3 different interaction modes for image manipulation in a surgery setting: 1) A gesture-controlled approach using MK; 2) verbal instructions to a third party; and 3) direct manipulation using a mouse.</td>
              <td>Crossover randomized controlled trial with blocked randomization.</td>
              <td>Interaction modes were direct manipulation using a mouse, verbal instructions given to a third party, and gesture-controlled manipulation using MK.</td>
              <td>30 physicians and senior medical students.</td>
              <td>Under the premise that a mouse cannot be used directly during surgery, gesture-controlled approaches were shown to be superior to verbal instructions for image manipulation.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref121">121</xref>]</td>
              <td>To evaluate the feasibility, validity, and reliability of the training system for motion parameter and ergonomic analyses between different experience levels of surgeons using the NDI Polaris System and MK camera.</td>
              <td>Construct validity, concurrent validity and test-retest reliability. Prospective blinded study.</td>
              <td>Tying of intra-corporeal MIS knots.</td>
              <td>10 MIS novices, 10 intermediate level and 10 experts.</td>
              <td>Validity and reliability of the self-developed sensor and expert model-based MIS training system ‘iSurgeon’ were established.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref73">73</xref>]</td>
              <td>To analyze preoperative breast volume in patients with breast cancer in order to predict implant size for reconstruction.</td>
              <td>Exploratory study.</td>
              <td>MK was used to acquire 3D images of the patients’ breasts before surgery and after surgery.</td>
              <td>10 patients.</td>
              <td>This study showed the feasibility of using fast, simple and inexpensive 3D imaging technology for predicting implant size before surgery, although there were significant technical challenges in determining breast volume by surface imaging.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref52">52</xref>]</td>
              <td>To evaluate the feasibility of using 3 different gesture control sensors (MK, the LMC and the Myo armband) to interact in a sterile manner with preoperative data as well as in settings of an integrated operating room during MIS.</td>
              <td>Pilot user study.</td>
              <td>2 hepatectomies and 2 partial nephrectomies on an experimental porcine model.</td>
              <td>3 surgeons.</td>
              <td>Natural user interfaces are feasible for directly interacting, in a more intuitive and sterile manner, with preoperative images and integrated operating room functionalities during MIS. The combination of the Myo armband and voice commands provided the most intuitive and accurate natural user interface.</td>
            </tr>
          </tbody>
        </table>
        <table-wrap-foot>
          <fn id="table1fn1">
            <p><sup>a</sup>MK: Microsoft Kinect.</p>
          </fn>
          <fn id="table1fn2">
            <p><sup>b</sup>CT: Computed Tomography.</p>
          </fn>
          <fn id="table1fn3">
            <p><sup>c</sup>MRI: magnetic resonance imaging.</p>
          </fn>
          <fn id="table1fn4">
            <p><sup>d</sup>3D: 3-dimensional.</p>
          </fn>
          <fn id="table1fn5">
            <p><sup>e</sup>MIS: minimally invasive surgery.</p>
          </fn>
          <fn id="table1fn6">
            <p><sup>f</sup>LMC: Leap Motion Controller.</p>
          </fn>
        </table-wrap-foot>
      </table-wrap>
      <table-wrap position="float" id="table2">
        <label>Table 2</label>
        <caption>
          <p>Summary of included studies evaluating the Leap Motion Controller.</p>
        </caption>
        <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
          <col width="80"/>
          <col width="220"/>
          <col width="150"/>
          <col width="160"/>
          <col width="160"/>
          <col width="230"/>
          <thead>
            <tr valign="top">
              <td>Study</td>
              <td>Aim</td>
              <td>Type of study</td>
              <td>Intervention</td>
              <td>Sample</td>
              <td>Results/Conclusions</td>
            </tr>
          </thead>
          <tbody>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref63">63</xref>]</td>
              <td>To evaluate the implementation of a low-cost device for touchless PACS control in an interventional radiology suite. To demonstrate that interaction with gestures can decrease the duration of the procedures, the risk of re-intervention, and improve technical performance.</td>
              <td>Proof-of-concept and prototype feasibility testing.</td>
              <td>Manipulation of images in interventional radiology.</td>
              <td>Interventional radiology suite.</td>
              <td>The LMC<sup>a</sup> is a feasible, portable and low-cost alternative to other touchless PACS interaction systems. A decrease in the need for re-intervention was reported, but no explanation was given of how it was measured.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref54">54</xref>]</td>
              <td>To present the first experience of using new systems for image control in the operating room: the LMC and OsiriX.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of CT<sup>b</sup> and MRI<sup>c</sup> images.</td>
              <td>2 general surgeons, 1 urologist, 3 orthopedic surgeons and 2 surgeons</td>
              <td>The average training time was 5 min. The system is very cost-effective, efficient and prevents contamination during surgery. First experience of using the LMC to control CT and MRI images during surgery.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref116">116</xref>]</td>
              <td>To validate the possibility of performing precise telesurgical tasks by means of the LMC.</td>
              <td>Comparative study of the Sigma.7 electro-mechanical device and the LMC.</td>
              <td>Peg transferring task and answering a questionnaire. The success rate of peg transfers.</td>
              <td>10 researchers.</td>
              <td>The results allowed the authors to confirm that fine tracking of the hand could be performed with the LMC. The observed performance of the optical interface proved to be comparable with that of traditional electro-mechanical devices.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref87">87</xref>]</td>
              <td>To describe a piece of software for image processing with OsiriX using finger gestures.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of radiological images.</td>
              <td>Not described.</td>
              <td>It is possible to implement gesture control of medical devices with low-cost, minimal resources. The device is very sensitive to surface dirt and this affects performance. The device favors the occlusion phenomenon.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref113">113</xref>]</td>
              <td>To evaluate 2 contactless hand tracking systems, the LMC and MK<sup>d</sup>, for their potential to control surgical robots.</td>
              <td>Experiment.</td>
              <td>Manipulation of robots in surgery.</td>
              <td>4 trained surgeons.</td>
              <td>Neither system has the high level of accuracy and robustness that would be required for controlling medical robots.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref129">129</xref>]</td>
              <td>To evaluate the LMC for simple 2-dimensional interaction and the action of entering a value.</td>
              <td>Proof-of-concept and prototype testing.</td>
              <td>Manipulation of medical information and operating room lights.</td>
              <td>A 90-min conference on computer science and untrained users.</td>
              <td>The user cases should be carefully classified and the most appropriate gestures for each application should be detected and implemented. Optimal lighting conditions for the LMC have still not been evaluated, as unwanted light that degrades the emitted IR signal may lead to a reduction in the recognition rate.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref81">81</xref>]</td>
              <td>To compare the average time required by the conventional method using a mouse and an operating method with a finger-motion sensor.</td>
              <td>Observational study.</td>
              <td>Manipulation of angiographic images.</td>
              <td>11 radiologists who observed a simulated clinical case.</td>
              <td>After a practice time of 30 min, the average operation time by the finger method was significantly shorter than that by the mouse method.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref14">14</xref>]</td>
              <td>To develop a workstation that allows intraoperative touchless control of diagnostic and surgical images in dentistry.</td>
              <td>Prototype user testing.</td>
              <td>Manipulation of radiological images.</td>
              <td>2 surgeons. A case series of 11 dental surgery procedures.</td>
              <td>The system performed very well. Its low cost favors its incorporation into clinical facilities of developing countries, reducing the number of staff required in operating rooms.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref88">88</xref>]</td>
              <td>To propose an interface to control hand gestures and gestures with hand-held tools. In this approach, hand-held tools can become gesture devices that the user can use to control the images.</td>
              <td>Prototype user testing.</td>
              <td>Manipulation of ultrasound images.</td>
              <td>12 participants.</td>
              <td>Users were able to significantly improve their performance with practice.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref56">56</xref>]</td>
              <td>To develop a software application for the manipulation of a 3D<sup>e</sup> pancreatic or liver tumor model by using CT and real-time elastography data.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of CT and real-time elastography images.</td>
              <td>15 patients with liver cancer and 10 patients with pancreatic cancer.</td>
              <td>A 3D model of liver and pancreatic tumors was successfully implemented with a hands-free interaction device suitable for sterile environments and for aiding diagnostic or therapeutic interventions.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref117">117</xref>]</td>
              <td>To present a new gesture recognition system for manipulating 2 surgical robots in a virtual simulator.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of robots in surgery.</td>
              <td>2 surgical robots in a virtual simulator.</td>
              <td>The device provided satisfactory accuracy and speed. It requires a more complete Application Programming Interface.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref90">90</xref>]</td>
              <td>To propose a web-based interface to retrieve medical images using gestures.</td>
              <td>User testing. Pilot study.</td>
              <td>Manipulation of radiological images.</td>
              <td>2 users.</td>
              <td>User feedback was positive. Users reported fatigue with prolonged use of gestures. Additional studies are required to validate the interface.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref64">64</xref>]</td>
              <td>To describe the use of the LMC for image manipulation during hepatic transarterial chemoembolization and internal radiotherapy procedures.</td>
              <td>Proof-of-concept.</td>
              <td>Manipulation of images in interventional radiology.</td>
              <td>Not described.</td>
              <td>Gesture-based imaging control may lead to increased efficacy and safety with decreased radiation exposure during hepatic transarterial chemoembolization procedures.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref77">77</xref>]</td>
              <td>To compare 2 commercial motion sensors (MK and the LMC) to manipulate CT images, in terms of their utility, usability, speed, accuracy and user acceptance.</td>
              <td>Two-strand sequential observational study. Qualitative and quantitative descriptive field study using a semi-structured questionnaire.</td>
              <td>Manipulation of CT images.</td>
              <td>42 participants: radiologists, surgeons and interventional radiologists.</td>
              <td>Marginal to average acceptability of the 2 devices. MK was found to be more useful and easier to use, but the LMC was more accurate. Further research is required to establish the design specifications, installation guidelines and user training requirements to ensure successful implementation in clinical areas.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref91">91</xref>]</td>
              <td>To evaluate a new method for image manipulation using a motion sensor.</td>
              <td>Observational study. User testing and proof-of-concept.</td>
              <td>Manipulation of radiological images in dentistry.</td>
              <td>14 students. 6 images.</td>
              <td>Using the system, several processes can be performed quickly with finger movements. Using gestures was significantly superior to using a mouse in terms of time.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref92">92</xref>]</td>
              <td>To develop a new system for manipulating images using a motion sensor.</td>
              <td>Observational study.</td>
              <td>Manipulation of radiological images in dentistry.</td>
              <td>14 students. 25 images.</td>
              <td>The operation time with the LMC was significantly shorter than with the conventional method using a mouse.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref108">108</xref>]</td>
              <td>To design a virtual 3D online environment for motor skills learning in MIS<sup>f</sup> using exercises from the MISR-VR. The environment is designed in Unity, and the LMC is used as the device for interaction with the MIS forceps.</td>
              <td>Letter to the editor.</td>
              <td>None.</td>
              <td>Not described.</td>
              <td>If it can be shown that 3D online environments mediated by natural user interfaces enable motor skills learning in MIS, a new field of research and development in the area of surgical simulation will be opened up.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref124">124</xref>]</td>
              <td>Patent for accurate 3D instrument positioning.</td>
              <td>Patent.</td>
              <td>None.</td>
              <td>Not described.</td>
              <td>Representing, on an output display, 3D positions and orientations of an instrument while medical procedures are being performed.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref69">69</xref>]</td>
              <td>To describe the configuration for using the LMC in neurosurgery for image manipulation during a surgical procedure.</td>
              <td>User testing.</td>
              <td>Manipulation of images during a surgical procedure.</td>
              <td>Resection of a meningioma and sarcoma surgery.</td>
              <td>The learning curve only took 30 min. Although the main disadvantage was the lack of standardization of the gestures, the LMC is a low-cost, reliable and easily personalized device for controlling images in the surgical environment.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref109">109</xref>]</td>
              <td>To develop skills in students and professionals using computer simulation technologies based on hand gesture capture systems.</td>
              <td>User testing.</td>
              <td>Description of the virtual environment.</td>
              <td>Not described.</td>
              <td>Simulation and new gesture recognition technologies open up new possibilities for the generation of computer-mediated procedures for medical training.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref93">93</xref>]</td>
              <td>To present a gesture-controlled projection display that enables a direct and natural physician-machine interaction during CT-based interventions.</td>
              <td>User testing (pilot and main).</td>
              <td>8 tasks manipulating CT images.</td>
              <td>12 participants (biomedical engineers, medical students and radiologists).</td>
              <td>Gesture recognition is robust, although there is potential for improvement. The gesture training times are less than 10 min, but vary considerably between study participants.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref94">94</xref>]</td>
              <td>To develop an anatomy learning system using the LMC.</td>
              <td>User testing.</td>
              <td>Manipulation of 220 anatomical images.</td>
              <td>30 students and lecturers from an anatomy department.</td>
              <td>The anatomy learning system using the LMC was successfully developed and it is suitable and acceptable as a support tool in an anatomy learning system.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref123">123</xref>]</td>
              <td>To study the possibility of tracking laparoscopic instruments using the LMC in a box trainer.</td>
              <td>Experiment.</td>
              <td>3 static experiments and 1 dynamic experiment.</td>
              <td>1 user.</td>
              <td>The LMC had acceptable precision for tracking laparoscopic instruments in a box trainer.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref126">126</xref>]</td>
              <td>To assess the potential of the LMC to track the movement of hands using MIS instruments.</td>
              <td>Construct validity, concurrent validity. Comparative study with the InsTrac.</td>
              <td>Passing a thread through pegs using the eoSim simulator.</td>
              <td>3 experts and 10 novices.</td>
              <td>The LMC is able to track the movement of hands using instruments in a MIS box simulator. Construct validity was demonstrated. Concurrent validity was only demonstrated for time and instrument path distance. A number of limitations to the tracking method used by LMC have been identified.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref118">118</xref>]</td>
              <td>To explore the use of the LMC in endonasal pituitary surgery and to compare it with the Phantom Omni.</td>
              <td>Comparative study between the LMC and the Phantom Omni.</td>
              <td>16 resections of simulated pituitary gland tumors using a robot manipulated by the Phantom Omni and by the LMC.</td>
              <td>3 neurosurgeons.</td>
              <td>Users were able to achieve a very similar percentage of resection and procedure duration using the LMC.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref95">95</xref>]</td>
              <td>To try to interact with medical images via a web browser using the LMC.</td>
              <td>Prototype user testing.</td>
              <td>Rotation, panning, scaling and selection of slices of a reconstructed 3D model based on CT or MRI.</td>
              <td>1 user.</td>
              <td>It is feasible to build this system and interaction can be carried out in real time.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref58">58</xref>]</td>
              <td>To analyze the value of 2 gesture input modalities (the Myo armband and the LMC) versus 2 clinically established methods (task delegation and joystick control).</td>
              <td>User study. Comparative study.</td>
              <td>Simulating a diagnostic neuroradiological vascular treatment with 2 frequently used interaction tasks in an experimental operating room.</td>
              <td>10 neuroradiologists.</td>
              <td>Novel input modalities have the potential to carry out single tasks more efficiently than clinically established methods.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref120">120</xref>]</td>
              <td>To investigate the potential of a virtual reality simulator for the assessment of basic laparoscopic skills, based on the LMC.</td>
              <td>Face and construct validity.</td>
              <td>3 basic tasks: camera navigation, instrument navigation, and two-handed operation.</td>
              <td>2 groups of surgeons (28 experts and 21 novices).</td>
              <td>This study provides evidence of the potential use of the LMC for assessing basic laparoscopic skills. The proposed system allows the dexterity of hand movements to be evaluated.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref52">52</xref>]</td>
              <td>To evaluate the feasibility of using 3 different gesture control sensors (MK, the LMC and the Myo armband) to interact in a sterile manner with preoperative data as well as in settings of an integrated operating room during MIS.</td>
              <td>Pilot user study.</td>
              <td>2 hepatectomies and 2 partial nephrectomies on an experimental porcine model.</td>
              <td>3 surgeons.</td>
              <td>Natural user interfaces are feasible for directly interacting, in a more intuitive and sterile manner, with preoperative images and integrated operating room functionalities during MIS. The combination of the Myo armband and voice commands provided the most intuitive and accurate natural user interface.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref127">127</xref>]</td>
              <td>To evaluate the LMC as a tool for the objective measurement and assessment of surgical dexterity among users at different experience levels.</td>
              <td>Construct validity study.</td>
              <td>Surgical knot tying and manual transfer of objects.</td>
              <td>11 participants.</td>
              <td>The study showed 100% accuracy in discriminating between expert and novice performances.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref66">66</xref>]</td>
              <td>To design an affordable and easily accessible endoscopic third ventriculostomy simulator based on the LMC, and to compare it with the NeuroTouch for its usability and training effectiveness.</td>
              <td>Concurrent and construct validity study.</td>
              <td>4 ellipsoid practice targeting tasks and 36 ventricle targeting tasks.</td>
              <td>16 novice users and 2 expert neurosurgeons.</td>
              <td>An easy-access simulator was created, which has the potential to become a training tool and a surgical training assessment tool. This system can be used for planning procedures using patient datasets.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref119">119</xref>]</td>
              <td>To present the LMC as a novel control device to manipulate the RAVEN-II robot.</td>
              <td>Comparative study between the LMC and the electro-mechanical Sigma.7.</td>
              <td>Comparison of peg manipulations during a training task with a contact-based device (Sigma.7).</td>
              <td>3 operators.</td>
              <td>With contactless control, manipulability is not as good as it is with contact-based control. Complete control of the surgical instruments is feasible. This work is promising for the development of future human-machine interfaces dedicated to robotic surgical training systems.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref98">98</xref>]</td>
              <td>To evaluate the effect of using virtual reality surgery on the self-confidence and knowledge of surgical residents (the LMC and Oculus Rift).</td>
              <td>Multisite, single-blind, parallel, randomized controlled trial.</td>
              <td>The study group used the virtual reality surgery application. The control group used similar content in a standard presentation.</td>
              <td>95 residents from 7 dental schools.</td>
              <td>Immersive virtual reality experiences improve the knowledge and self-confidence of the surgical residents.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref97">97</xref>]</td>
              <td>To develop and validate a novel training tool for Le Fort I osteotomy based on immersive virtual reality (the LMC and Oculus Rift).</td>
              <td>Face and content validity.</td>
              <td>A pre-intervention questionnaire to understand training needs and a postintervention feedback questionnaire.</td>
              <td>7 consultant oral and maxillofacial surgeons.</td>
              <td>The results confirmed the clinical applicability of virtual reality for delivering training in orthognathic surgery.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref70">70</xref>]</td>
              <td>To investigate the feasibility and practicability of a low-cost multimodal head-mounted display system in neuroendoscopic surgery (the LMC and Oculus Rift).</td>
              <td>Proof-of-concept in the operating room.</td>
              <td>Ventriculocystocisternostomy. Ventriculostomy. Tumoral biopsy.</td>
              <td>21 patients with ventricular diseases. 1 neurosurgeon.</td>
              <td>The head-mounted display system is feasible, practical, helpful, and relatively cost efficient in neuroendoscopic surgery.</td>
            </tr>
          </tbody>
        </table>
        <table-wrap-foot>
          <fn id="table2fn1">
            <p><sup>a</sup>LMC: Leap Motion Controller.</p>
          </fn>
          <fn id="table2fn2">
            <p><sup>b</sup>CT: Computed Tomography.</p>
          </fn>
          <fn id="table2fn3">
            <p><sup>c</sup>MRI: magnetic resonance imaging.</p>
          </fn>
          <fn id="table2fn4">
            <p><sup>d</sup>3D: 3-dimensional.</p>
          </fn>
          <fn id="table2fn5">
            <p><sup>e</sup>MK: Microsoft Kinect.</p>
          </fn>
          <fn id="table2fn6">
            <p><sup>f</sup>MIS: minimally invasive surgery.</p>
          </fn>
        </table-wrap-foot>
      </table-wrap>
      <table-wrap position="float" id="table3">
        <label>Table 3</label>
        <caption>
          <p>Summary of included studies evaluating other devices.</p>
        </caption>
        <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
          <col width="80"/>
          <col width="180"/>
          <col width="180"/>
          <col width="180"/>
          <col width="190"/>
          <col width="190"/>
          <thead>
            <tr valign="top">
              <td>Study</td>
              <td>Device</td>
              <td>Aim</td>
              <td>Type of study</td>
              <td>Intervention</td>
              <td>Results/Conclusions</td>
            </tr>
          </thead>
          <tbody>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref53">53</xref>]</td>
              <td>Camera with Complementary Metal-Oxide-Semiconductor sensor</td>
              <td>To propose an architecture for a real-time multimodal system to provide a touchless user interface in surgery.</td>
              <td>Prototype user testing.</td>
              <td>Gesture detection in computer-assisted surgery.</td>
              <td>The preliminary results show good usability and rapid learning. The average time to click anywhere on the screen was less than 5 seconds. Lighting conditions affected the performance of the system. The surgeon showed strong interest in the system and satisfactorily assessed the use of gestures within the operating room.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref82">82</xref>]</td>
              <td>Webcam</td>
              <td>To describe a vision-based system that can interpret gestures in real time to manipulate objects within a medical data visualization environment.</td>
              <td>Prototype user testing.</td>
              <td>Manipulation of medical data (radiology images and selection of medical records) and movement of objects and windows on the screen.</td>
              <td>The system implemented in a sterile environment demonstrated performance rates between 95% and 100%.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref27">27</xref>]</td>
              <td>Canon VC-C4 color camera</td>
              <td>To describe a vision-based gesture capture system that interprets gestures in real time to manipulate medical images.</td>
              <td>Beta testing during a surgical procedure. Experiment.</td>
              <td>A beta test of a system prototype was conducted during a live brain biopsy operation, where neurosurgeons were able to browse through MRI<sup>a</sup> images of the patient’s brain using the sterile hand gesture interface.</td>
              <td>Gesture recognition accuracy was 96%. For every repeat of trials, the task completion time decreased by 28% and the learning curve levelled off at the 10th attempt. The gestures were learned very quickly and there was a significant decrease in the number of excess gestures. Rotation accuracy was reasonable. The surgeons rated the system as easy to use, with a rapid response, and useful in the surgical environment.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref26">26</xref>]</td>
              <td>Canon VC-C4 camera</td>
              <td>To evaluate the Gestix system.</td>
              <td>Prototype user testing.</td>
              <td>Manipulation of MRI images during a neurosurgical biopsy.</td>
              <td>The system setup time was 20 min. The surgeons found the Gestix system easy to use, with a rapid response, and easy to learn. The system does not require the use of wearable devices.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref59">59</xref>]</td>
              <td>Interaction with gestures in general</td>
              <td>Fieldwork focusing on work practices and interactions in an angiography suite and on understanding the collaborative work practices in terms of image production and use.</td>
              <td>Ethnographic study of minimally invasive image-guided procedures within an interventional radiology department.</td>
              <td>Manipulation of radiological images.</td>
              <td>The paper discusses the implications of the findings in the work environment for touchless interaction technologies, and suggests that these will be of importance in considering new input techniques in other medical settings.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref115">115</xref>]</td>
              <td>Commercial video camera</td>
              <td>To describe the development of Gestonurse, a robotic system for surgical instruments.</td>
              <td>Proof-of-concept.</td>
              <td>Surgical instrumentation using a robot.</td>
              <td>95% of gestures were recognized correctly. The system was only 0.83 seconds slower when compared with the performance of a human instrument handler.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref65">65</xref>]</td>
              <td>Touchless interaction systems in general</td>
              <td>To understand and use common practices in the surgical setting from a proxemics point of view to uncover implications for the design of touchless interaction systems. The aim is to think of touchlessness in terms of its spatial properties. What does spatial separation imply for the introduction of the touchless control of medical images?</td>
              <td>Ethnographic study.</td>
              <td>Field observations of work practices in neurosurgery.</td>
              <td>Alternative ideas, such as multiple cameras, are the kind of solution that these findings suggest. Such reflections and considerations can be revealed through careful analysis of the spatial organization of activity and proxemics of particular interaction mechanisms. However, it is very important to study current practice in order to speculate about new systems, because they in turn may alter practice.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref122">122</xref>]</td>
              <td>Webcam</td>
              <td>To present a system for tracking the movement of MIS<sup>b</sup> instruments based on an orthogonal webcam system installed in a physical simulator.</td>
              <td>Experiment.</td>
              <td>Recording the movements of the instrument within an imaginary cube.</td>
              <td>The results showed a resolution of 0.616 mm on each axis of work, linearity and repeatability in motion tracking, as well as automatic detection of the 3D position of the tip of the surgical instruments with sufficient accuracy. The system is a low-cost and portable alternative to traditional instrument tracking devices.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref52">52</xref>]</td>
              <td>MK, the LMC<sup>c</sup>, the Myo armband and voice control</td>
              <td>To evaluate the feasibility of using 3 different gesture control sensors (MK, the LMC and the Myo armband) to interact in a sterile manner with preoperative data as well as in settings of an integrated operating room during MIS.</td>
              <td>Pilot user study.</td>
              <td>2 hepatectomies and 2 partial nephrectomies on an experimental porcine model.</td>
              <td>Natural user interfaces are feasible for directly interacting, in a more intuitive and sterile manner, with preoperative images and integrated operating room functionalities during MIS. The combination of the Myo armband and voice commands provided the most intuitive and accurate natural user interface.</td>
            </tr>
            <tr valign="top">
              <td>[<xref ref-type="bibr" rid="ref58">58</xref>]</td>
              <td>The Myo armband and the LMC</td>
              <td>To analyze the value of 2 gesture input modalities (the Myo armband and the LMC) versus 2 clinically established methods (task delegation and joystick control).</td>
              <td>User study. Comparative study.</td>
              <td>Simulating a diagnostic neuroradiological vascular treatment with 2 frequently used interaction tasks in an experimental operating room.</td>
              <td>Novel input modalities have the potential to carry out single tasks more efficiently than clinically established methods.</td>
            </tr>
          </tbody>
        </table>
        <table-wrap-foot>
          <fn id="table3fn1">
            <p><sup>a</sup>MRI: magnetic resonance imaging.</p>
          </fn>
          <fn id="table3fn2">
            <p><sup>b</sup>MIS: minimally invasive surgery.</p>
          </fn>
          <fn id="table3fn3">
            <p><sup>c</sup>LMC: Leap Motion Controller.</p>
          </fn>
        </table-wrap-foot>
      </table-wrap>
      <table-wrap position="float" id="table4">
        <label>Table 4</label>
        <caption>
          <p>Clinical areas and types of surgical intervention in which gesture-based commercial off-the-shelf devices were used.</p>
        </caption>
        <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
          <col width="300"/>
          <col width="600"/>
          <col width="100"/>
          <thead>
            <tr valign="top">
              <td>Clinical areas</td>
              <td>Types of surgical intervention</td>
              <td>Studies</td>
            </tr>
          </thead>
          <tbody>
            <tr valign="top">
              <td>General surgery (N=7)</td>
              <td>Intraoperative image control, image-guided minimally invasive surgery (adrenalectomy, pancreatectomy, liver resection, a Whipple procedure, as well as liver and pancreatic cancer and renal carcinoma resection), open and laparoscopic bile duct surgery, cholecystectomy, and hepatectomy and nephrectomy in an animal model.</td>
              <td>[<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref52">52</xref>-<xref ref-type="bibr" rid="ref57">57</xref>]</td>
            </tr>
            <tr valign="top">
              <td>Interventional radiology and angiography (N=7)</td>
              <td>Arterial dilatation with balloon and umbrella devices, hepatic arterial chemoembolization and selective internal radiation therapy, abdominal computed tomography, and interventional neuroradiology.</td>
              <td>[<xref ref-type="bibr" rid="ref58">58</xref>-<xref ref-type="bibr" rid="ref64">64</xref>]</td>
            </tr>
            <tr valign="top">
              <td>Neurosurgery (N=7)</td>
              <td>Biopsies, resection of brain gliomas, resection of a meningioma, ventriculostomy, and intraoperative image control.</td>
              <td>[<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref65">65</xref>-<xref ref-type="bibr" rid="ref70">70</xref>]</td>
            </tr>
            <tr valign="top">
              <td>Plastic surgery (N=3)</td>
              <td>Measurement of breast implant volumes and measurement of distances on the breast surface.</td>
              <td>[<xref ref-type="bibr" rid="ref71">71</xref>-<xref ref-type="bibr" rid="ref73">73</xref>]</td>
            </tr>
            <tr valign="top">
              <td>Orthopedics (N=3)</td>
              <td>Intraoperative image control.</td>
              <td>[<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref74">74</xref>,<xref ref-type="bibr" rid="ref75">75</xref>]</td>
            </tr>
            <tr valign="top">
              <td>Ear, nose, and throat (N=1)</td>
              <td>Laryngoplasty.</td>
              <td>[<xref ref-type="bibr" rid="ref76">76</xref>]</td>
            </tr>
            <tr valign="top">
              <td>Urology (N=2)</td>
              <td>Enucleation of renal tumors and intraoperative image control.</td>
              <td>[<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref54">54</xref>]</td>
            </tr>
          </tbody>
        </table>
      </table-wrap>
      <table-wrap position="float" id="table5">
        <label>Table 5</label>
        <caption>
          <p>Use of gesture-based commercial off-the-shelf devices in surgery.</p>
        </caption>
        <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
          <col width="30"/>
          <col width="700"/>
          <col width="270"/>
          <thead>
            <tr valign="top">
              <td colspan="2">Use</td>
              <td>Studies</td>
            </tr>
          </thead>
          <tbody>
            <tr valign="top">
              <td colspan="3"><bold>Manipulation of images in interventional radiology environments or in the operating room (N=42)</bold></td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Image manipulation</td>
              <td>[<xref ref-type="bibr" rid="ref5">5</xref>,<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref56">56</xref>,<xref ref-type="bibr" rid="ref58">58</xref>-<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref67">67</xref>-<xref ref-type="bibr" rid="ref69">69</xref>,<xref ref-type="bibr" rid="ref74">74</xref>,<xref ref-type="bibr" rid="ref76">76</xref>-<xref ref-type="bibr" rid="ref95">95</xref>]</td>
            </tr>
            <tr valign="top">
              <td colspan="3"><bold>Education and training</bold></td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Virtual or augmented reality for educational or interventional purposes (N=16)</td>
              <td>[<xref ref-type="bibr" rid="ref75">75</xref>,<xref ref-type="bibr" rid="ref94">94</xref>,<xref ref-type="bibr" rid="ref96">96</xref>-<xref ref-type="bibr" rid="ref109">109</xref>]</td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Training in endoscopy (bronchoscopy and colonoscopy; N=3)</td>
              <td>[<xref ref-type="bibr" rid="ref110">110</xref>-<xref ref-type="bibr" rid="ref112">112</xref>]</td>
            </tr>
            <tr valign="top">
              <td colspan="3"><bold>Robotic surgery (N=7)</bold></td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Robotics in surgery and in surgical instrumentation</td>
              <td>[<xref ref-type="bibr" rid="ref113">113</xref>-<xref ref-type="bibr" rid="ref119">119</xref>]</td>
            </tr>
            <tr valign="top">
              <td colspan="3"><bold>Tracking of hand or instrument movements during open or minimally invasive surgery</bold></td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Instrument tracking in MIS<sup>a</sup> (N=7)</td>
              <td>[<xref ref-type="bibr" rid="ref108">108</xref>,<xref ref-type="bibr" rid="ref120">120</xref>-<xref ref-type="bibr" rid="ref125">125</xref>]</td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Tracking of hand movements during MIS (N=2)</td>
              <td>[<xref ref-type="bibr" rid="ref109">109</xref>,<xref ref-type="bibr" rid="ref126">126</xref>]</td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Tracking of hand movements during open surgical knot tying (N=1)</td>
              <td>[<xref ref-type="bibr" rid="ref127">127</xref>]</td>
            </tr>
            <tr valign="top">
              <td colspan="3"><bold>Simulation for skills learning in MIS (N=4)</bold></td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Simulation for motor skills learning in MIS</td>
              <td>[<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref108">108</xref>,<xref ref-type="bibr" rid="ref120">120</xref>]</td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Using patient-specific 3-dimensional images during MIS in real patients or simulators, and presurgical warm-up</td>
              <td>[<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref70">70</xref>,<xref ref-type="bibr" rid="ref108">108</xref>]</td>
            </tr>
            <tr valign="top">
              <td colspan="3"><bold>Other uses</bold></td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Ethnographic studies (N=5)</td>
              <td>[<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref65">65</xref>,<xref ref-type="bibr" rid="ref78">78</xref>,<xref ref-type="bibr" rid="ref83">83</xref>,<xref ref-type="bibr" rid="ref114">114</xref>]</td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Measurement of breast implant volumes and measurement of distances on the breast surface (N=3)</td>
              <td>[<xref ref-type="bibr" rid="ref71">71</xref>-<xref ref-type="bibr" rid="ref73">73</xref>]</td>
            </tr>
            <tr valign="top">
              <td><break/></td>
              <td>Manipulation of the operating table and lights (N=4)</td>
              <td>[<xref ref-type="bibr" rid="ref128">128</xref>-<xref ref-type="bibr" rid="ref130">130</xref>]</td>
            </tr>
          </tbody>
        </table>
        <table-wrap-foot>
          <fn id="table5fn1">
            <p><sup>a</sup>MIS: minimally invasive surgery.</p>
          </fn>
        </table-wrap-foot>
      </table-wrap>
      <sec>
        <title>Aims, Types of Study, Metrics, Samples, Results and Conclusions</title>
        <p>In 78% (67/86) of the articles, the aim was to develop, create, present, describe, propose, examine, or explore a COTS-based system for gesture recognition in surgery. Most of the articles [<xref ref-type="bibr" rid="ref65">65</xref>] identified in this systematic review were proof-of-concept or prototype user testing and observational and feasibility testing studies (<xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>, see <xref ref-type="app" rid="app1">Multimedia Appendices 1</xref>-<xref ref-type="app" rid="app3">3</xref> for the full <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>). In the 5 ethnographic studies included, the aim was to identify interactions between the staff and gesture-based COTS systems in interventional radiology departments or in the operating room [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref65">65</xref>,<xref ref-type="bibr" rid="ref78">78</xref>,<xref ref-type="bibr" rid="ref114">114</xref>]. In 4 studies, the aim was to compare the performance of MK with that of a mouse [<xref ref-type="bibr" rid="ref5">5</xref>,<xref ref-type="bibr" rid="ref79">79</xref>,<xref ref-type="bibr" rid="ref80">80</xref>,<xref ref-type="bibr" rid="ref96">96</xref>]; in 1 study, it was to compare the performance of the LMC with that of a mouse [<xref ref-type="bibr" rid="ref81">81</xref>]; and in 4 studies, it was to compare different COTS devices [<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref58">58</xref>,<xref ref-type="bibr" rid="ref77">77</xref>,<xref ref-type="bibr" rid="ref113">113</xref>]. 
In 10 studies, the aim was to evaluate face validity [<xref ref-type="bibr" rid="ref97">97</xref>,<xref ref-type="bibr" rid="ref120">120</xref>], content validity [<xref ref-type="bibr" rid="ref97">97</xref>], construct validity [<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref110">110</xref>,<xref ref-type="bibr" rid="ref111">111</xref>,<xref ref-type="bibr" rid="ref120">120</xref>,<xref ref-type="bibr" rid="ref121">121</xref>,<xref ref-type="bibr" rid="ref126">126</xref>,<xref ref-type="bibr" rid="ref127">127</xref>,<xref ref-type="bibr" rid="ref132">132</xref>], or concurrent validity of the devices [<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref71">71</xref>,<xref ref-type="bibr" rid="ref121">121</xref>,<xref ref-type="bibr" rid="ref126">126</xref>]. A total of 7 studies involved experiments [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref113">113</xref>,<xref ref-type="bibr" rid="ref115">115</xref>,<xref ref-type="bibr" rid="ref122">122</xref>,<xref ref-type="bibr" rid="ref123">123</xref>,<xref ref-type="bibr" rid="ref131">131</xref>] and there was 1 patent application for an LMC-based application [<xref ref-type="bibr" rid="ref124">124</xref>] and 1 interrater reliability study [<xref ref-type="bibr" rid="ref72">72</xref>]. In addition, 1 study was a quasi-experimental prospective, blinded study with test-retest reliability [<xref ref-type="bibr" rid="ref121">121</xref>]. Only 2 randomized controlled trials were identified [<xref ref-type="bibr" rid="ref80">80</xref>,<xref ref-type="bibr" rid="ref98">98</xref>], and when a tool for assessing risk of bias in randomized trials [<xref ref-type="bibr" rid="ref133">133</xref>] was applied to them, it was found to be low in both.</p>
        <p>In total, 25 out of 86 (29%) articles failed to describe the metric used, whereas 23 out of 86 (27%) used time as the main one. Given the varied nature of the design of the studies, the remaining 38 articles described multiple metrics such as performance rates, percentage of gesture recognition, accuracy of gesture recognition and/or speed of transmission thereof, measures of volume or distance, and questionnaires or interviews. Similarly, the sample types and numbers were very dissimilar: 17.4% of the articles did not describe the sample type, and the remainder stated that the samples comprised medical or veterinary students or specialists in several radiological or surgical specialties (<xref ref-type="table" rid="table4">Table 4</xref>).</p>
      </sec>
      <sec>
        <title>Interventions</title>
        <p>The most common intervention (42 studies) was image manipulation in general radiology, ultrasound imaging, interventional radiology, angiography, computed tomography, magnetic resonance imaging, and real-time elastography (in the operating room, in the operative dentistry setting, or in the interventional radiology suites; <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>; see <xref ref-type="app" rid="app1">Multimedia Appendices 1</xref>-<xref ref-type="app" rid="app3">3</xref> for the full <xref ref-type="table" rid="table1">Tables 1</xref>-<xref ref-type="table" rid="table3">3</xref>). <xref ref-type="table" rid="table5">Table 5</xref> shows other uses identified for gesture-based COTS devices in surgical environments.</p>
      </sec>
      <sec>
        <title>Use of Commercial Off-The-Shelf Devices as Simulation Tools for Motor Skills Teaching in Minimally Invasive Surgery</title>
        <p>In the field of skills learning in MIS, in 2013, Pérez et al first described the tracking of laparoscopic instruments using webcams, with encouraging results [<xref ref-type="bibr" rid="ref122">122</xref>]. From 2016, several authors proposed the interesting possibility of using COTS devices for tracking laparoscopic instruments. Such devices include both the LMC [<xref ref-type="bibr" rid="ref108">108</xref>,<xref ref-type="bibr" rid="ref121">121</xref>,<xref ref-type="bibr" rid="ref123">123</xref>,<xref ref-type="bibr" rid="ref124">124</xref>] and MK [<xref ref-type="bibr" rid="ref125">125</xref>]. In 2017, a portable low-cost simulator using the LMC [<xref ref-type="bibr" rid="ref120">120</xref>] for basic motor skills learning in MIS was described, and so too were a simulator for endoscopic third ventriculostomy learning [<xref ref-type="bibr" rid="ref66">66</xref>] and a head-mounted display system using Oculus Rift and the LMC to guide neuroendoscopic surgery by manipulating 3D images [<xref ref-type="bibr" rid="ref70">70</xref>]. Others used the approach of tracking hand movements during MIS training [<xref ref-type="bibr" rid="ref109">109</xref>,<xref ref-type="bibr" rid="ref126">126</xref>]. Only 1 study explored the use of the LMC to assess surgical dexterity in tying surgical knots in open surgery [<xref ref-type="bibr" rid="ref127">127</xref>].</p>
        <p>Furthermore, 1 study compared 3 natural user interfaces (MK, the LMC, and the Myo armband) in combination with voice control to perform 2 hepatectomies and 2 partial nephrectomies on an experimental porcine model [<xref ref-type="bibr" rid="ref52">52</xref>]; similar to the studies by Wright [<xref ref-type="bibr" rid="ref66">66</xref>] and Xu [<xref ref-type="bibr" rid="ref70">70</xref>], this study used 3D reconstructions of preoperative images of the patient, which were manipulated by gestures during surgery. However, the application of gesture control technology in these cases is not for training purposes but for surgical assistance and planning.</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Findings</title>
        <p>Using commercial devices to detect manual gestures in surgery is a very topical issue, given the need to manipulate medical images and for real-time 3D reconstructions during procedures without breaking asepsis and antisepsis protocols. Early studies published on this possibility used COTS systems with webcams, Complementary Metal-Oxide-Semiconductor-sensor cameras, and commercial digital cameras [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref53">53</xref>,<xref ref-type="bibr" rid="ref82">82</xref>]. These pioneering studies showed that contactless interaction with images and medical information in environments such as operating rooms was possible using low-cost devices.</p>
        <p>In this systematic review, MK and the LMC were identified as the most widely used COTS systems. MK was rated as a useful tool for the manipulation of medical data in sterile environments, with a positive rate of acceptance in 85% (39/46) of the studies on it. The LMC had a positive rate of acceptance in 83% (29/35) of the studies on it. The Myo armband was used to manipulate interventional neuroradiology images [<xref ref-type="bibr" rid="ref58">58</xref>]. In addition, in a comparative study of the Myo armband, MK, and the LMC, they were used to manipulate images while hepatectomies and partial nephrectomies were being performed on an animal model [<xref ref-type="bibr" rid="ref52">52</xref>]. In both cases, the device was rated highly. The main positive characteristics identified for the devices were the following: there was no need for contact; they were low-cost and portable; there was no need for calibration at the time of use; the gesture learning curve was easy; and the gesture recognition rates were high.</p>
      </sec>
      <sec>
        <title>Performance of Individual Devices</title>
        <p>MK [<xref ref-type="bibr" rid="ref30">30</xref>] and the LMC [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref81">81</xref>,<xref ref-type="bibr" rid="ref87">87</xref>,<xref ref-type="bibr" rid="ref134">134</xref>,<xref ref-type="bibr" rid="ref135">135</xref>] both use infrared cameras. The MK system is based on the time-of-flight principle [<xref ref-type="bibr" rid="ref61">61</xref>], whereas the LMC is based on a sensor for infrared optical tracking with stereo vision accuracy. The MK depth sensor works at a distance between 0.8 m and 3.5 m, and the interface tracks the skeleton of the system operator. The wide range of distances at which the device recognizes gestures presents problems when using it in close interaction. The LMC detects the positions of fine objects such as finger tips or pen tips in a Cartesian plane. Its interaction zone is an inverted cone of approximately 0.23 m³ and the motion detection range fluctuates between 20 mm and 600 mm [<xref ref-type="bibr" rid="ref91">91</xref>,<xref ref-type="bibr" rid="ref129">129</xref>]. The manufacturer reports an accuracy of 0.01 mm for fingertip detection, although 1 study showed an accuracy of 0.7 mm, which is considered superior to that achieved using MK [<xref ref-type="bibr" rid="ref134">134</xref>,<xref ref-type="bibr" rid="ref136">136</xref>]. The dimensions of the MK device are 280 mm (width), 71 mm (depth), and 66 mm (height) and its weight is 556 g, whereas those of the LMC are 76 mm (width), 30 mm (depth), and 13 mm (height) and its weight is 45 g.</p>
        <p>Only 5 of the 46 (11%) studies that evaluated MK identified disadvantages relating to a longer latency time, difficulty in recreating an image when compared with a keyboard or mouse [<xref ref-type="bibr" rid="ref5">5</xref>], limited gesture recognition, interference between the movements of different people in small environments [<xref ref-type="bibr" rid="ref85">85</xref>,<xref ref-type="bibr" rid="ref89">89</xref>,<xref ref-type="bibr" rid="ref130">130</xref>], and the users’ preference for a mouse in a comparative study [<xref ref-type="bibr" rid="ref96">96</xref>]. Various studies have highlighted the inaccuracy of MK in detecting finger movements [<xref ref-type="bibr" rid="ref5">5</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref85">85</xref>,<xref ref-type="bibr" rid="ref137">137</xref>], and the system also requires the use of large format screens [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref85">85</xref>,<xref ref-type="bibr" rid="ref90">90</xref>]. The system was taken off the market in October 2017.</p>
        <p>With regard to the LMC, once the 6 studies on robotics had been discarded, 4 articles were identified that presented limitations derived from using the device (18%). These studies noted alterations in performance when there was dirt on the surface of the device, as well as the limited number of gestures recognized owing to the occlusion phenomenon [<xref ref-type="bibr" rid="ref87">87</xref>], alterations caused by ambient lighting [<xref ref-type="bibr" rid="ref129">129</xref>], fatigue in some users [<xref ref-type="bibr" rid="ref90">90</xref>], and a lack of studies validating the device for medical use [<xref ref-type="bibr" rid="ref77">77</xref>].</p>
        <p>The Myo armband was launched in 2013. This wearable wireless device is able to record electromyography via 8 stainless steel dry surface electrodes. It has a 9-axis inertial measurement unit sensor, haptic feedback, and Bluetooth communication capability. The main disadvantage is its limited sampling frequency of 200 Hz [<xref ref-type="bibr" rid="ref138">138</xref>-<xref ref-type="bibr" rid="ref140">140</xref>]. In total, 2 studies on the Myo armband were identified. The first concluded that the combination of the Myo armband and voice commands provided the most intuitive and accurate natural user interface [<xref ref-type="bibr" rid="ref141">141</xref>]. The second compared the Myo armband and LMC with traditional image manipulation methods in surgery and concluded that the new input modalities had the potential to become more efficient [<xref ref-type="bibr" rid="ref58">58</xref>].</p>
      </sec>
      <sec>
        <title>Commercial Off-The-Shelf Devices in Robotic Surgery</title>
        <p>Studies on the application of gesture-based COTS devices in robot-assisted surgery failed to demonstrate usefulness, owing to either the high cost of the robotic arm when using commercial cameras in surgical instrumentation [<xref ref-type="bibr" rid="ref115">115</xref>] or, in the case of the LMC, the need for a more robust Application Programming Interface [<xref ref-type="bibr" rid="ref116">116</xref>,<xref ref-type="bibr" rid="ref117">117</xref>] and the lack of sufficient accuracy and robustness for manipulating a medical robot [<xref ref-type="bibr" rid="ref113">113</xref>]. However, an ethnographic study found that MK was useful for workflow monitoring and for avoiding collisions between medical robots and operating room staff [<xref ref-type="bibr" rid="ref114">114</xref>]. A simulation study of endonasal pituitary surgery comparing the LMC with the Phantom Omni showed that surgeons achieved a very similar percentage of tumor mass resection and procedure duration using the LMC to control the robot [<xref ref-type="bibr" rid="ref118">118</xref>]. Another study found that the robotic tools could be controlled by gestures for training purposes but that the level of control had yet to reach that of a contact-based robotic controller [<xref ref-type="bibr" rid="ref119">119</xref>].</p>
      </sec>
      <sec>
        <title>Commercial Off-The-Shelf Devices in Training and Simulation</title>
        <p>Studies on the use of COTS devices for gesture-based interfaces using the hand in the field of education in surgery refer to the use of virtual reality and augmented reality for teaching anatomy or for living the immersive experience within a virtual operating room. A total of 3 studies explored the possibility of using MK as a tool for skills learning in bronchoscopy and colonoscopy by means of simulation [<xref ref-type="bibr" rid="ref110">110</xref>-<xref ref-type="bibr" rid="ref112">112</xref>].</p>
        <p>Various authors explored the possibility of hand tracking [<xref ref-type="bibr" rid="ref109">109</xref>,<xref ref-type="bibr" rid="ref126">126</xref>] or instrument tracking [<xref ref-type="bibr" rid="ref108">108</xref>,<xref ref-type="bibr" rid="ref121">121</xref>-<xref ref-type="bibr" rid="ref125">125</xref>] using COTS devices to assess performance in MIS training. From these 2 approaches, Lahanas [<xref ref-type="bibr" rid="ref120">120</xref>] eventually presented a portable low-cost model of a virtual reality simulator for basic motor skills learning in MIS, which was based on the LMC and capable of tracking instruments. The author also presented face and construct validity studies. The original forceps tracking problems noted by the author were probably because they were black. Problems caused by this color were also described in the study by Oropesa. This issue had already been raised by our group [<xref ref-type="bibr" rid="ref108">108</xref>].</p>
        <p>In the field of simulation for robotic surgery learning, the first studies published [<xref ref-type="bibr" rid="ref113">113</xref>,<xref ref-type="bibr" rid="ref115">115</xref>-<xref ref-type="bibr" rid="ref117">117</xref>] found that the interfaces did not allow robots to be manipulated by gestures. However, the most recent publications [<xref ref-type="bibr" rid="ref118">118</xref>,<xref ref-type="bibr" rid="ref119">119</xref>] have suggested that the LMC could be a low-cost solution for creating control interfaces for surgical robots for the purposes of performing operations or training by means of simulation.</p>
      </sec>
      <sec>
        <title>Ethnographic Studies</title>
        <p>Ethnographic studies [<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref65">65</xref>,<xref ref-type="bibr" rid="ref78">78</xref>,<xref ref-type="bibr" rid="ref83">83</xref>,<xref ref-type="bibr" rid="ref114">114</xref>] deserve a separate mention as they transcend proofs-of-concept and user and prototype testing and approach gesture-based touchless interaction from a holistic viewpoint that includes the social practices of surgery, as well as the way in which medical images and manipulation devices are embedded and made meaningful within the collaborative practices of the surgery [<xref ref-type="bibr" rid="ref10">10</xref>].</p>
      </sec>
      <sec>
        <title>Requirements for the Future</title>
        <p>There was found to be a shortage of objective validation studies (face validity: 1 study; concurrent validity: 3 studies; construct validity: 3 studies; discriminant validity: none; and predictive validity: none) of the different applications developed and presented as prototypes or proofs-of-concept for use in the clinical or teaching field. In teaching, the field of hand gesture–based interfaces should prioritize the following research objectives: first, to transcend studies on technical feasibility and individual hand gesture–based interaction with medical images so as to tackle the issue systematically within a framework of collaborative discussion, as happens in real surgical environments; and second, to conduct experimental studies in simulated surgical environments that allow hand gestures to be validated as a useful tool for touchless interaction in real operating rooms. To that end, the language of hand gestures for medical use would have to be standardized, so that the surgeons’ cognitive load can be reduced. In turn, algorithms should be developed to allow differentiation between intentional and unintentional gestures (spotting) in the small spaces of the operating room. Finally, the problem of temporal segmentation ambiguity (how to define the gesture start and end points) and that of spatial-temporal variability (gestures can vary significantly from one individual to another) must be resolved.</p>
        <p>From the range of evidence found, it is possible to infer that, with regard to the use of COTS devices, there is a very interesting field of study for the development and objective validation (construct, concurrent, discriminant, and predictive validities) of portable low-cost virtual reality simulators for motor skills learning in MIS and robotic surgery. Such simulators will enable surgeons to do presurgical warm-ups anywhere at any time based on 3D reconstructions of specific patients’ images [<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref70">70</xref>,<xref ref-type="bibr" rid="ref108">108</xref>]. Thus, surgeons will be able to practice the surgery the night before they are due to perform it from the comfort of their own homes.</p>
        <p>Despite the fact that MK was taken off the market in 2017 and that the LMC software only allows tool tracking up to V2 Tracking, the use of interaction with gesture-based virtual environments in the field of simulation identified in this review will enable new COTS devices (ie, the Myo armband) to be explored for skills learning in MIS and robotic surgery.</p>
      </sec>
      <sec>
        <title>Limitations</title>
        <p>A number of potential methodological limitations in our systematic review should be discussed. First, our inclusion criteria were limited to English-language publications. Second, although we used the most commonly used search engines in the health field (PubMed, EMBASE, ScienceDirect, Espacenet, OpenGrey, and IEEE) and complemented that by using the snowballing technique to identify relevant articles in the results generated by our search, we may have missed a few articles related to our research question. Finally, there may have been some potential for subjectivity in analyzing the findings, although 2 authors carefully reviewed each study independently and then discussed the results while double-checking each process and subsequently resolved any discrepancies through discussions with the third author whenever necessary.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>As most of the articles identified in this systematic review are proof-of-concept or prototype user testing and feasibility testing studies, we can conclude that the field is still in the exploratory phase in areas requiring touchless manipulation within environments and settings that must adhere to asepsis and antisepsis protocols, such as angiography suites and operating rooms.</p>
        <p>Without doubt, COTS devices applied to hand and instrument gesture–based interfaces in the field of simulation for skills learning and training in MIS could open up a promising field to achieve ubiquitous training and presurgical warm-up.</p>
        <p>The withdrawal of MK from the market and suspension of the instrument tracking function in the latest LMC software versions constitute threats to the new developments identified in this review. Nevertheless, gesture-based interaction devices are clearly useful for manipulating images in interventional radiology environments or the operating room and for the development of virtual reality simulators for skills training in MIS and robotic surgery.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <app id="app1">
        <title>Multimedia Appendix 1</title>
        <p>Summary of included studies evaluating Microsoft Kinect.</p>
        <media xlink:href="jmir_v21i5e11925_app1.pdf" xlink:title="PDF File (Adobe PDF File), 176KB"/>
      </app>
      <app id="app2">
        <title>Multimedia Appendix 2</title>
        <p>Summary of included studies evaluating the Leap Motion Controller.</p>
        <media xlink:href="jmir_v21i5e11925_app2.pdf" xlink:title="PDF File (Adobe PDF File), 132KB"/>
      </app>
      <app id="app3">
        <title>Multimedia Appendix 3</title>
        <p>Summary of included studies evaluating other devices.</p>
        <media xlink:href="jmir_v21i5e11925_app3.pdf" xlink:title="PDF File (Adobe PDF File), 66KB"/>
      </app>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">3D</term>
          <def>
            <p>3-dimensional</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">COTS</term>
          <def>
            <p>commercial off-the-shelf</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">EMBASE</term>
          <def>
            <p>Excerpta Medica dataBASE</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">IEEE</term>
          <def>
            <p>Institute of Electrical and Electronics Engineers</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">LMC</term>
          <def>
            <p>Leap Motion Controller</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">MeSH</term>
          <def>
            <p>Medical Subject Headings</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">MIS</term>
          <def>
            <p>minimally invasive surgery</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb8">MK</term>
          <def>
            <p>Microsoft Kinect</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <fn-group>
      <fn fn-type="con">
        <p>All the authors contributed substantially to the study conception and design, data analysis and interpretation of the findings, and manuscript drafting. FAL participated in the collection and assembly of data. FSR is the guarantor of the paper. All the authors have read, revised, and approved the final manuscript.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bures</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Fishbain</surname>
            <given-names>JT</given-names>
          </name>
          <name name-style="western">
            <surname>Uyehara</surname>
            <given-names>CF</given-names>
          </name>
          <name name-style="western">
            <surname>Parker</surname>
            <given-names>JM</given-names>
          </name>
          <name name-style="western">
            <surname>Berg</surname>
            <given-names>BW</given-names>
          </name>
        </person-group>
        <article-title>Computer keyboards and faucet handles as reservoirs of nosocomial pathogens in the intensive care unit</article-title>
        <source>Am J Infect Control</source>  
        <year>2000</year>  
        <month>12</month>  
        <volume>28</volume>  
        <issue>6</issue>  
        <fpage>465</fpage>  
        <lpage>71</lpage>  
        <pub-id pub-id-type="doi">10.1067/mic.2000.107267</pub-id>
        <pub-id pub-id-type="medline">11114617</pub-id>
        <pub-id pub-id-type="pii">S0196-6553(00)90655-2</pub-id></nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Schultz</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Gill</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Zubairi</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Huber</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Gordin</surname>
            <given-names>F</given-names>
          </name>
        </person-group>
        <article-title>Bacterial contamination of computer keyboards in a teaching hospital</article-title>
        <source>Infect Control Hosp Epidemiol</source>  
        <year>2003</year>  
        <month>04</month>  
        <volume>24</volume>  
        <issue>4</issue>  
        <fpage>302</fpage>  
        <lpage>3</lpage>  
        <pub-id pub-id-type="doi">10.1086/502200</pub-id>
        <pub-id pub-id-type="medline">12725363</pub-id>
        <pub-id pub-id-type="pii">ICHE5209</pub-id></nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hartmann</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Benson</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Junger</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Quinzio</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Röhrig</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Fengler</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Färber</surname>
            <given-names>UW</given-names>
          </name>
          <name name-style="western">
            <surname>Wille</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Hempelmann</surname>
            <given-names>G</given-names>
          </name>
        </person-group>
        <article-title>Computer keyboard and mouse as a reservoir of pathogens in an intensive care unit</article-title>
        <source>J Clin Monit Comput</source>  
        <year>2004</year>  
        <month>02</month>  
        <volume>18</volume>  
        <issue>1</issue>  
        <fpage>7</fpage>  
        <lpage>12</lpage>  
        <pub-id pub-id-type="doi">10.1023/B:JOCM.0000025279.27084.39</pub-id>
        <pub-id pub-id-type="medline">15139578</pub-id></nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Lu</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Siu</surname>
            <given-names>LK</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Ma</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Chiang</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>T</given-names>
          </name>
        </person-group>
        <article-title>Methicillin-resistant Staphylococcus aureus and Acinetobacter baumannii on computer interface surfaces of hospital wards and association with clinical isolates</article-title>
        <source>BMC Infect Dis</source>  
        <year>2009</year>  
        <month>10</month>  
        <day>1</day>  
        <volume>9</volume>  
        <fpage>164</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://bmcinfectdis.biomedcentral.com/articles/10.1186/1471-2334-9-164"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1186/1471-2334-9-164</pub-id>
        <pub-id pub-id-type="medline">19796381</pub-id>
        <pub-id pub-id-type="pii">1471-2334-9-164</pub-id>
        <pub-id pub-id-type="pmcid">PMC2765444</pub-id></nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ebert</surname>
            <given-names>LC</given-names>
          </name>
          <name name-style="western">
            <surname>Hatch</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Ampanozi</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Thali</surname>
            <given-names>MJ</given-names>
          </name>
          <name name-style="western">
            <surname>Ross</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>You can't touch this: touch-free navigation through radiological images</article-title>
        <source>Surg Innov</source>  
        <year>2012</year>  
        <month>09</month>  
        <volume>19</volume>  
        <issue>3</issue>  
        <fpage>301</fpage>  
        <lpage>7</lpage>  
        <pub-id pub-id-type="doi">10.1177/1553350611425508</pub-id>
        <pub-id pub-id-type="medline">22064490</pub-id>
        <pub-id pub-id-type="pii">1553350611425508</pub-id></nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>D'Antonio</surname>
            <given-names>NN</given-names>
          </name>
          <name name-style="western">
            <surname>Rihs</surname>
            <given-names>JD</given-names>
          </name>
          <name name-style="western">
            <surname>Stout</surname>
            <given-names>JE</given-names>
          </name>
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>VL</given-names>
          </name>
        </person-group>
        <article-title>Computer keyboard covers impregnated with a novel antimicrobial polymer significantly reduce microbial contamination</article-title>
        <source>Am J Infect Control</source>  
        <year>2013</year>  
        <month>04</month>  
        <volume>41</volume>  
        <issue>4</issue>  
        <fpage>337</fpage>  
        <lpage>9</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.ajic.2012.03.030</pub-id>
        <pub-id pub-id-type="medline">23036480</pub-id>
        <pub-id pub-id-type="pii">S0196-6553(12)00792-4</pub-id></nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ionescu</surname>
            <given-names>AV</given-names>
          </name>
        </person-group>
        <article-title>A mouse in the OR</article-title>
        <source>Ambidextrous: Stanford Univ Journal of Design</source>  
        <year>2006</year>  
        <volume>30</volume>  
        <fpage>2</fpage>
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://triody.com/wp-content/uploads/2015/02/MouseInOr_AmbidextrousMag.pdf"/>
        </comment> </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>van Veelen</surname>
            <given-names>MA</given-names>
          </name>
          <name name-style="western">
            <surname>Snijders</surname>
            <given-names>CJ</given-names>
          </name>
          <name name-style="western">
            <surname>van Leeuwen</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Goossens</surname>
            <given-names>RH</given-names>
          </name>
          <name name-style="western">
            <surname>Kazemier</surname>
            <given-names>G</given-names>
          </name>
        </person-group>
        <article-title>Improvement of foot pedals used during surgery based on new ergonomic guidelines</article-title>
        <source>Surg Endosc</source>  
        <year>2003</year>  
        <month>07</month>  
        <volume>17</volume>  
        <issue>7</issue>  
        <fpage>1086</fpage>  
        <lpage>91</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00464-002-9185-z</pub-id>
        <pub-id pub-id-type="medline">12728372</pub-id></nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Grätzel</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Fong</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Grange</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Baur</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>A non-contact mouse for surgeon-computer interaction</article-title>
        <source>Technol Health Care</source>  
        <year>2004</year>  
        <volume>12</volume>  
        <issue>3</issue>  
        <fpage>245</fpage>  
        <lpage>57</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://core.ac.uk/download/pdf/147896869.pdf"/>
        </comment>  
        <pub-id pub-id-type="medline">15328453</pub-id></nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>O'Hara</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Dastur</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Carrell</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Gonzalez</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Sellen</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Penney</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Varnavas</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Mentis</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Criminisi</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Corish</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Rouncefield</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>Touchless interaction in surgery</article-title>
        <source>Commun ACM</source>  
        <year>2014</year>  
        <month>01</month>  
        <day>1</day>  
        <volume>57</volume>  
        <issue>1</issue>  
        <fpage>70</fpage>  
        <lpage>7</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://www.researchgate.net/publication/262219695_Touchless_Interaction_in_Surgery"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1145/2541883.2541899</pub-id></nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>El-Shallaly</surname>
            <given-names>GE</given-names>
          </name>
          <name name-style="western">
            <surname>Mohammed</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Muhtaseb</surname>
            <given-names>MS</given-names>
          </name>
          <name name-style="western">
            <surname>Hamouda</surname>
            <given-names>AH</given-names>
          </name>
          <name name-style="western">
            <surname>Nassar</surname>
            <given-names>AH</given-names>
          </name>
        </person-group>
        <article-title>Voice recognition interfaces (VRI) optimize the utilization of theatre staff and time during laparoscopic cholecystectomy</article-title>
        <source>Minim Invasive Ther Allied Technol</source>  
        <year>2005</year>  
        <volume>14</volume>  
        <issue>6</issue>  
        <fpage>369</fpage>  
        <lpage>71</lpage>  
        <pub-id pub-id-type="doi">10.1080/13645700500381685</pub-id>
        <pub-id pub-id-type="medline">16754183</pub-id>
        <pub-id pub-id-type="pii">R5J7592038526855</pub-id></nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Nathan</surname>
            <given-names>CO</given-names>
          </name>
          <name name-style="western">
            <surname>Chakradeo</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Malhotra</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>D'Agostino</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Patwardhan</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>The voice-controlled robotic assist scope holder AESOP for the endoscopic approach to the sella</article-title>
        <source>Skull Base</source>  
        <year>2006</year>  
        <month>08</month>  
        <volume>16</volume>  
        <issue>3</issue>  
        <fpage>123</fpage>  
        <lpage>31</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/17268585"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1055/s-2006-939679</pub-id>
        <pub-id pub-id-type="medline">17268585</pub-id>
        <pub-id pub-id-type="pmcid">PMC1586176</pub-id></nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Strickland</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Tremaine</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Brigley</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Law</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>Using a depth-sensing infrared camera system to access and manipulate medical imaging from within the sterile operating field</article-title>
        <source>Can J Surg</source>  
        <year>2013</year>  
        <month>06</month>  
        <volume>56</volume>  
        <issue>3</issue>  
        <fpage>E1</fpage>  
        <lpage>6</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.canjsurg.ca/vol56-issue3/56-3-E1/"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1503/cjs.035311</pub-id>
        <pub-id pub-id-type="medline">23706851</pub-id>
        <pub-id pub-id-type="pii">10.1503/cjs.035311</pub-id>
        <pub-id pub-id-type="pmcid">PMC3672422</pub-id></nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Rosa</surname>
            <given-names>GM</given-names>
          </name>
          <name name-style="western">
            <surname>Elizondo</surname>
            <given-names>ML</given-names>
          </name>
        </person-group>
        <article-title>Use of a gesture user interface as a touchless image navigation system in dental surgery: case series report</article-title>
        <source>Imaging Sci Dent</source>  
        <year>2014</year>  
        <month>06</month>  
        <volume>44</volume>  
        <issue>2</issue>  
        <fpage>155</fpage>  
        <lpage>60</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://isdent.org/DOIx.php?id=10.5624/isd.2014.44.2.155"/>
        </comment>  
        <pub-id pub-id-type="doi">10.5624/isd.2014.44.2.155</pub-id>
        <pub-id pub-id-type="medline">24944966</pub-id>
        <pub-id pub-id-type="pmcid">PMC4061300</pub-id></nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="web">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <source>Purdue Equestrian Team</source>  
        <year>2007</year>  
        <access-date>2019-04-01</access-date>
        <comment>Optimal Hand-Gesture Vocabulary Design Methodology for Virtual Robotic Control 
        <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://web.ics.purdue.edu/~jpwachs/papers/PHD_JUAN_JW.pdf">https://web.ics.purdue.edu/~jpwachs/papers/PHD_JUAN_JW.pdf</ext-link>
        <ext-link ext-link-type="webcite" xlink:href="77JJfnGoX"/></comment> </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Yanagihara</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Hiromitsu</surname>
            <given-names>H</given-names>
          </name>
        </person-group>
        <article-title>System for selecting and generating images controlled by eye movements applicable to CT image display</article-title>
        <source>Med Imaging Technol</source>  
        <year>2000</year>  
        <volume>18</volume>  
        <fpage>725</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.11409/mit.18.725"/>
        </comment>  
        <pub-id pub-id-type="doi">10.11409/mit.18.725</pub-id></nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Placitelli</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Ciampi</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>Controller-free exploration of medical image data: experiencing the Kinect</article-title>
        <source>Proceedings of the 2011 24th International Symposium on Computer-Based Medical Systems</source>  
        <year>2011</year>  
        <month>06</month>  
        <day>27</day>  
        <conf-name>CMBS'11</conf-name>
        <conf-date>June 27-30, 2011</conf-date>
        <conf-loc>Bristol, UK</conf-loc>
        <fpage>1</fpage>  
        <lpage>6</lpage>  
        <pub-id pub-id-type="doi">10.1109/CBMS.2011.5999138</pub-id></nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Coddington</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Xu</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Sridharan</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Rege</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Bailey</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Gaze-based image retrieval system using dual eye-trackers</article-title>
        <year>2012</year>  
        <month>01</month>  
        <day>12</day>  
        <conf-name>2012 IEEE International Conference on Emerging Signal Processing Applications</conf-name>
        <conf-date>January 12-14, 2012</conf-date>
        <conf-loc>Las Vegas, NV, USA</conf-loc>
        <fpage>37</fpage>  
        <pub-id pub-id-type="doi">10.1109/ESPA.2012.6152440</pub-id></nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Jacob</surname>
            <given-names>MG</given-names>
          </name>
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>JP</given-names>
          </name>
          <name name-style="western">
            <surname>Packer</surname>
            <given-names>RA</given-names>
          </name>
        </person-group>
        <article-title>Hand-gesture-based sterile interface for the operating room using contextual cues for the navigation of radiological images</article-title>
        <source>J Am Med Inform Assoc</source>  
        <year>2013</year>  
        <month>06</month>  
        <volume>20</volume>  
        <issue>e1</issue>  
        <fpage>e183</fpage>  
        <lpage>6</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/23250787"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1136/amiajnl-2012-001212</pub-id>
        <pub-id pub-id-type="medline">23250787</pub-id>
        <pub-id pub-id-type="pii">amiajnl-2012-001212</pub-id>
        <pub-id pub-id-type="pmcid">PMC3715344</pub-id></nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Tani</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Maia</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>von Wangenheim</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>A Gesture Interface for Radiological Workstations</article-title>
        <source>Twentieth IEEE International Symposium on Computer-Based Medical Systems</source>  
        <year>2007</year>  
        <month>06</month>  
        <day>20</day>  
        <conf-name>CMBS'07</conf-name>
        <conf-date>June 20-22, 2007</conf-date>
        <conf-loc>Maribor, Slovenia</conf-loc>
        <publisher-loc>Maribor</publisher-loc>
        <publisher-name>IEEE</publisher-name>
        <fpage>07</fpage>  
        <pub-id pub-id-type="doi">10.1109/CBMS.2007.6</pub-id></nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Zudilova-Seinstra</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>de Koning</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Suinesiaputra</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>van Schooten</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>van der Geest</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Reiber</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Sloot</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <article-title>Evaluation of 2D and 3D glove input applied to medical image analysis</article-title>
        <source>Int J Hum Comput Stud</source>  
        <year>2010</year>  
        <month>06</month>  
        <volume>68</volume>  
        <issue>6</issue>  
        <fpage>355</fpage>  
        <lpage>69</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.ijhcs.2009.08.001"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.ijhcs.2009.08.001</pub-id></nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kirmizibayrak</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <source>Interactive Volume Visualization and Editing Methods for Surgical Applications</source>  
        <year>2001</year>  
        <publisher-loc>Washington, DC</publisher-loc>
        <publisher-name>George Washington University</publisher-name></nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bigdelou</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Schwarz</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Navab</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>An adaptive solution for intra-operative gesture-based human-machine interaction</article-title>
        <source>Proceedings of the 2012 ACM international conference on Intelligent User Interfaces</source>  
        <year>2012</year>  
        <conf-name>IUI'12</conf-name>
        <conf-date>February 14-17, 2012</conf-date>
        <conf-loc>Lisbon, Portugal</conf-loc>
        <publisher-loc>New York, NY, USA</publisher-loc>
        <publisher-name>ACM</publisher-name>
        <fpage>75</fpage>  
        <lpage>84</lpage>  
        <pub-id pub-id-type="doi">10.1145/2166966.2166981</pub-id></nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ren</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>O'Neill</surname>
            <given-names>E</given-names>
          </name>
        </person-group>
        <article-title>3D selection with freehand gesture</article-title>
        <source>Comput Graph</source>  
        <year>2013</year>  
        <month>05</month>  
        <volume>37</volume>  
        <issue>3</issue>  
        <fpage>101</fpage>  
        <lpage>20</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.cag.2012.12.006</pub-id></nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Nishikawa</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Hosoi</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Koara</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Negoro</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Hikita</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Asano</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Kakutani</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Miyazaki</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Sekimoto</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Yasui</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Miyake</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Takiguchi</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Monden</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>FAce MOUSe: a novel human-machine interface for controlling the position of a laparoscope</article-title>
        <source>IEEE Trans Robot Autom</source>  
        <year>2003</year>  
        <month>10</month>  
        <volume>19</volume>  
        <issue>5</issue>  
        <fpage>825</fpage>  
        <lpage>41</lpage>  
        <pub-id pub-id-type="doi">10.1109/TRA.2003.817093</pub-id></nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>JP</given-names>
          </name>
          <name name-style="western">
            <surname>Stern</surname>
            <given-names>HI</given-names>
          </name>
          <name name-style="western">
            <surname>Edan</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Gillam</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Handler</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Feied</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Smith</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>A gesture-based tool for sterile browsing of radiology images</article-title>
        <source>J Am Med Inform Assoc</source>  
        <year>2008</year>  
        <volume>15</volume>  
        <issue>3</issue>  
        <fpage>321</fpage>  
        <lpage>3</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/18451034"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1197/jamia.M2410</pub-id>
        <pub-id pub-id-type="medline">18451034</pub-id>
        <pub-id pub-id-type="pii">15/3/321</pub-id>
        <pub-id pub-id-type="pmcid">PMC2410001</pub-id></nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Stern</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Edan</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Gillam</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Feied</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Smith</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Handler</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Real-time hand gesture interface for browsing medical images</article-title>
        <source>Int J Intell Comput Med Sci Image Process</source>  
        <year>2008</year>  
        <month>01</month>  
        <volume>2</volume>  
        <issue>1</issue>  
        <fpage>15</fpage>  
        <lpage>25</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1080/1931308X.2008.10644149"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1080/1931308X.2008.10644149</pub-id></nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Soutschek</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Penne</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Hornegger</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Kornhuber</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>3-D gesture-based scene navigation in medical imaging applications using Time-of-Flight cameras</article-title>
        <source>IEEE</source>  
        <year>2008</year>  
        <conf-name>2008 IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops</conf-name>
        <conf-date>June 23-28, 2008</conf-date>
        <conf-loc>Anchorage, AK, USA</conf-loc>
        <publisher-loc>Anchorage, AK</publisher-loc>
        <publisher-name>IEEE</publisher-name>
        <fpage>08</fpage>  
        <pub-id pub-id-type="doi">10.1109/CVPRW.2008.4563162</pub-id></nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kipshagen</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Tronnier</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Bonsanto</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Hofmann</surname>
            <given-names>UG</given-names>
          </name>
        </person-group>
        <article-title>Touch-marker-free interaction with medical software</article-title>
        <year>2009</year>  
        <month>09</month>  
        <day>07</day>  
        <conf-name>World Congress on Medical Physics and Biomedical Engineering</conf-name>
        <conf-date>September 7-12, 2009</conf-date>
        <conf-loc>Munich, Germany</conf-loc>
        <publisher-loc>Berlin, Heidelberg</publisher-loc>
        <publisher-name>Springer</publisher-name>
        <fpage>7</fpage>  
        <lpage>12</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1007/978-3-642-03906-5_21"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1007/978-3-642-03906-5_21</pub-id></nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ruppert</surname>
            <given-names>GC</given-names>
          </name>
          <name name-style="western">
            <surname>Reis</surname>
            <given-names>LO</given-names>
          </name>
          <name name-style="western">
            <surname>Amorim</surname>
            <given-names>PH</given-names>
          </name>
          <name name-style="western">
            <surname>de Moraes</surname>
            <given-names>TF</given-names>
          </name>
          <name name-style="western">
            <surname>da Silva</surname>
            <given-names>JV</given-names>
          </name>
        </person-group>
        <article-title>Touchless gesture user interface for interactive image visualization in urological surgery</article-title>
        <source>World J Urol</source>  
        <year>2012</year>  
        <month>10</month>  
        <volume>30</volume>  
        <issue>5</issue>  
        <fpage>687</fpage>  
        <lpage>91</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00345-012-0879-0</pub-id>
        <pub-id pub-id-type="medline">22580994</pub-id></nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>De Pietro</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Marra</surname>
            <given-names>I</given-names>
          </name>
        </person-group>
        <article-title>3D interaction with volumetric medical data: experiencing the Wiimote</article-title>
        <source>Proceedings of the 1st international conference on Ambient media and systems</source>  
        <year>2008</year>  
        <conf-name>Ambi-Sys'08</conf-name>
        <conf-date>February 11-14, 2008</conf-date>
        <conf-loc>Quebec, Canada</conf-loc>
        <publisher-loc>Brussels, Belgium</publisher-loc>
        <publisher-name>ICST</publisher-name>
        <pub-id pub-id-type="doi">10.1145/1363163.1363177</pub-id></nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hansen</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Köhn</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Schlichting</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Weiler</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Zidowitz</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Kleemann</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Peitgen</surname>
            <given-names>H</given-names>
          </name>
        </person-group>
        <article-title>Intraoperative modification of resection plans for liver surgery</article-title>
        <source>Int J CARS</source>  
        <year>2008</year>  
        <month>06</month>  
        <day>4</day>  
        <volume>3</volume>  
        <issue>3-4</issue>  
        <fpage>291</fpage>  
        <lpage>7</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007%2Fs11548-008-0161-5"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1007/s11548-008-0161-5</pub-id></nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>De Pietro</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Coronato</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Toward a natural interface to virtual medical imaging environments</article-title>
        <source>Proceedings of the working conference on Advanced visual interfaces</source>  
        <year>2008</year>  
        <conf-name>AVI'08</conf-name>
        <conf-date>May 28-30, 2008</conf-date>
        <conf-loc>Napoli, Italy</conf-loc>
        <publisher-loc>New York</publisher-loc>
        <publisher-name>ACM</publisher-name>
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://dl.acm.org/citation.cfm?id=1385651"/>
        </comment> </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Pietro</surname>
            <given-names>G</given-names>
          </name>
        </person-group>
        <person-group person-group-type="editor">
          <name name-style="western">
            <surname>Jeong</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Damiani</surname>
            <given-names>E</given-names>
          </name>
        </person-group>
        <article-title>Input devices and interaction techniques for VR-enhanced medicine</article-title>
        <source>Multimedia Techniques for Device and Ambient Intelligence</source>  
        <year>2009</year>  
        <publisher-loc>Boston, MA</publisher-loc>
        <publisher-name>Springer US</publisher-name>
        <fpage>115</fpage> </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Minutolo</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>de Pietro</surname>
            <given-names>G</given-names>
          </name>
        </person-group>
        <article-title>A user interface for VR-ready 3D medical imaging by off-the-shelf input devices</article-title>
        <source>Comput Biol Med</source>  
        <year>2010</year>  
        <month>03</month>  
        <volume>40</volume>  
        <issue>3</issue>  
        <fpage>350</fpage>  
        <lpage>8</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.compbiomed.2010.01.006</pub-id>
        <pub-id pub-id-type="medline">20149912</pub-id>
        <pub-id pub-id-type="pii">S0010-4825(10)00008-9</pub-id></nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
        </person-group>
        <person-group person-group-type="editor">
          <name name-style="western">
            <surname>Tsihrintzis</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Damiani</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Virvou</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Howlett</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Jain</surname>
            <given-names>L</given-names>
          </name>
        </person-group>
        <article-title>A glove-based interface for 3D medical image visualization</article-title>
        <source>Intelligent Interactive Multimedia Systems and Services</source>  
        <year>2010</year>  
        <publisher-loc>Berlin Heidelberg</publisher-loc>
        <publisher-name>Springer</publisher-name>
        <fpage>221</fpage> </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Chang</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Huang</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>A Kinect-based system for physical rehabilitation: a pilot study for young adults with motor disabilities</article-title>
        <source>Res Dev Disabil</source>  
        <year>2011</year>  
        <volume>32</volume>  
        <issue>6</issue>  
        <fpage>2566</fpage>  
        <lpage>70</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.ridd.2011.07.002</pub-id>
        <pub-id pub-id-type="medline">21784612</pub-id>
        <pub-id pub-id-type="pii">S0891-4222(11)00258-7</pub-id></nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Leiker</surname>
            <given-names>AM</given-names>
          </name>
          <name name-style="western">
            <surname>Miller</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Brewer</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Nelson</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Siow</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Lohse</surname>
            <given-names>K</given-names>
          </name>
        </person-group>
        <article-title>The relationship between engagement and neurophysiological measures of attention in motion-controlled video games: a randomized controlled trial</article-title>
        <source>JMIR Serious Games</source>  
        <year>2016</year>  
        <month>04</month>  
        <day>21</day>  
        <volume>4</volume>  
        <issue>1</issue>  
        <fpage>e4</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://games.jmir.org/2016/1/e4/"/>
        </comment>  
        <pub-id pub-id-type="doi">10.2196/games.5460</pub-id>
        <pub-id pub-id-type="medline">27103052</pub-id>
        <pub-id pub-id-type="pii">v4i1e4</pub-id>
        <pub-id pub-id-type="pmcid">PMC4858597</pub-id></nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Simor</surname>
            <given-names>FW</given-names>
          </name>
          <name name-style="western">
            <surname>Brum</surname>
            <given-names>MR</given-names>
          </name>
          <name name-style="western">
            <surname>Schmidt</surname>
            <given-names>JD</given-names>
          </name>
          <name name-style="western">
            <surname>Rieder</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>de Marchi</surname>
            <given-names>AC</given-names>
          </name>
        </person-group>
        <article-title>Usability evaluation methods for gesture-based games: a systematic review</article-title>
        <source>JMIR Serious Games</source>  
        <year>2016</year>  
        <month>10</month>  
        <day>4</day>  
        <volume>4</volume>  
        <issue>2</issue>  
        <fpage>e17</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://games.jmir.org/2016/2/e17/"/>
        </comment>  
        <pub-id pub-id-type="doi">10.2196/games.5860</pub-id>
        <pub-id pub-id-type="medline">27702737</pub-id>
        <pub-id pub-id-type="pii">v4i2e17</pub-id>
        <pub-id pub-id-type="pmcid">PMC5069401</pub-id></nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Dimaguila</surname>
            <given-names>GL</given-names>
          </name>
          <name name-style="western">
            <surname>Gray</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Merolli</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>Person-generated health data in simulated rehabilitation using Kinect for stroke: literature review</article-title>
        <source>JMIR Rehabil Assist Technol</source>  
        <year>2018</year>  
        <month>05</month>  
        <day>8</day>  
        <volume>5</volume>  
        <issue>1</issue>  
        <fpage>e11</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://rehab.jmir.org/2018/1/e11/"/>
        </comment>  
        <pub-id pub-id-type="doi">10.2196/rehab.9123</pub-id>
        <pub-id pub-id-type="medline">29739739</pub-id>
        <pub-id pub-id-type="pii">v5i1e11</pub-id>
        <pub-id pub-id-type="pmcid">PMC5964303</pub-id></nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gallagher</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Satava</surname>
            <given-names>RM</given-names>
          </name>
        </person-group>
        <article-title>Virtual reality as a metric for the assessment of laparoscopic psychomotor skills. Learning curves and reliability measures</article-title>
        <source>Surg Endosc</source>  
        <year>2002</year>  
        <month>12</month>  
        <volume>16</volume>  
        <issue>12</issue>  
        <fpage>1746</fpage>  
        <lpage>52</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00464-001-8215-6</pub-id>
        <pub-id pub-id-type="medline">12140641</pub-id></nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Korndorffer</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Clayton</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Tesfay</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Brunner</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Sierra</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Dunne</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Jones</surname>
            <given-names>DB</given-names>
          </name>
          <name name-style="western">
            <surname>Rege</surname>
            <given-names>RV</given-names>
          </name>
          <name name-style="western">
            <surname>Touchard</surname>
            <given-names>CL</given-names>
          </name>
          <name name-style="western">
            <surname>Scott</surname>
            <given-names>DJ</given-names>
          </name>
        </person-group>
        <article-title>Multicenter construct validity for southwestern laparoscopic videotrainer stations</article-title>
        <source>J Surg Res</source>  
        <year>2005</year>  
        <month>09</month>  
        <volume>128</volume>  
        <issue>1</issue>  
        <fpage>114</fpage>  
        <lpage>9</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.jss.2005.03.014</pub-id>
        <pub-id pub-id-type="medline">15916767</pub-id>
        <pub-id pub-id-type="pii">S0022-4804(05)00148-4</pub-id></nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ritter</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Kindelan</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Michael</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Pimentel</surname>
            <given-names>EA</given-names>
          </name>
          <name name-style="western">
            <surname>Bowyer</surname>
            <given-names>MW</given-names>
          </name>
        </person-group>
        <article-title>Concurrent validity of augmented reality metrics applied to the fundamentals of laparoscopic surgery (FLS)</article-title>
        <source>Surg Endosc</source>  
        <year>2007</year>  
        <month>08</month>  
        <volume>21</volume>  
        <issue>8</issue>  
        <fpage>1441</fpage>  
        <lpage>5</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00464-007-9261-5</pub-id>
        <pub-id pub-id-type="medline">17593461</pub-id></nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hennessey</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Hewett</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <article-title>Construct, concurrent, and content validity of the eoSim laparoscopic simulator</article-title>
        <source>J Laparoendosc Adv Surg Tech A</source>  
        <year>2013</year>  
        <month>10</month>  
        <volume>23</volume>  
        <issue>10</issue>  
        <fpage>855</fpage>  
        <lpage>60</lpage>  
        <pub-id pub-id-type="doi">10.1089/lap.2013.0229</pub-id>
        <pub-id pub-id-type="medline">23968255</pub-id></nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Seymour</surname>
            <given-names>NE</given-names>
          </name>
          <name name-style="western">
            <surname>Gallagher</surname>
            <given-names>AG</given-names>
          </name>
          <name name-style="western">
            <surname>Roman</surname>
            <given-names>SA</given-names>
          </name>
          <name name-style="western">
            <surname>O'Brien</surname>
            <given-names>MK</given-names>
          </name>
          <name name-style="western">
            <surname>Bansal</surname>
            <given-names>VK</given-names>
          </name>
          <name name-style="western">
            <surname>Andersen</surname>
            <given-names>DK</given-names>
          </name>
          <name name-style="western">
            <surname>Satava</surname>
            <given-names>RM</given-names>
          </name>
        </person-group>
        <article-title>Virtual reality training improves operating room performance: results of a randomized, double-blinded study</article-title>
        <source>Ann Surg</source>  
        <year>2002</year>  
        <month>10</month>  
        <volume>236</volume>  
        <issue>4</issue>  
        <fpage>458</fpage>  
        <lpage>63; discussion 463</lpage>  
        <pub-id pub-id-type="doi">10.1097/01.SLA.0000028969.51489.B4</pub-id>
        <pub-id pub-id-type="medline">12368674</pub-id>
        <pub-id pub-id-type="pmcid">PMC1422600</pub-id></nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Schijven</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Jakimowicz</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Broeders</surname>
            <given-names>IA</given-names>
          </name>
          <name name-style="western">
            <surname>Tseng</surname>
            <given-names>L</given-names>
          </name>
        </person-group>
        <article-title>The Eindhoven laparoscopic cholecystectomy training course--improving operating room performance using virtual reality training: results from the first E.A.E.S. accredited virtual reality trainings curriculum</article-title>
        <source>Surg Endosc</source>  
        <year>2005</year>  
        <month>09</month>  
        <volume>19</volume>  
        <issue>9</issue>  
        <fpage>1220</fpage>  
        <lpage>6</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00464-004-2240-1</pub-id>
        <pub-id pub-id-type="medline">16132332</pub-id></nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gurusamy</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Aggarwal</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Palanivelu</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Davidson</surname>
            <given-names>B</given-names>
          </name>
        </person-group>
        <article-title>Systematic review of randomized controlled trials on the effectiveness of virtual reality training for laparoscopic surgery</article-title>
        <source>Br J Surg</source>  
        <year>2008</year>  
        <month>09</month>  
        <volume>95</volume>  
        <issue>9</issue>  
        <fpage>1088</fpage>  
        <lpage>97</lpage>  
        <pub-id pub-id-type="doi">10.1002/bjs.6344</pub-id>
        <pub-id pub-id-type="medline">18690637</pub-id></nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Larsen</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Oestergaard</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Ottesen</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Soerensen</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>The efficacy of virtual reality simulation training in laparoscopy: a systematic review of randomized trials</article-title>
        <source>Acta Obstet Gynecol Scand</source>  
        <year>2012</year>  
        <month>09</month>  
        <volume>91</volume>  
        <issue>9</issue>  
        <fpage>1015</fpage>  
        <lpage>28</lpage>  
        <pub-id pub-id-type="doi">10.1111/j.1600-0412.2012.01482.x</pub-id>
        <pub-id pub-id-type="medline">22693954</pub-id></nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Greenhalgh</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Peacock</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Effectiveness and efficiency of search methods in systematic reviews of complex evidence: audit of primary sources</article-title>
        <source>Br Med J</source>  
        <year>2005</year>  
        <month>11</month>  
        <day>5</day>  
        <volume>331</volume>  
        <issue>7524</issue>  
        <fpage>1064</fpage>  
        <lpage>5</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/16230312"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1136/bmj.38636.593461.68</pub-id>
        <pub-id pub-id-type="medline">16230312</pub-id>
        <pub-id pub-id-type="pii">bmj.38636.593461.68</pub-id>
        <pub-id pub-id-type="pmcid">PMC1283190</pub-id></nlm-citation>
      </ref>
      <ref id="ref50">
        <label>50</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Shea</surname>
            <given-names>BJ</given-names>
          </name>
          <name name-style="western">
            <surname>Grimshaw</surname>
            <given-names>JM</given-names>
          </name>
          <name name-style="western">
            <surname>Wells</surname>
            <given-names>GA</given-names>
          </name>
          <name name-style="western">
            <surname>Boers</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Andersson</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Hamel</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Porter</surname>
            <given-names>AC</given-names>
          </name>
          <name name-style="western">
            <surname>Tugwell</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Moher</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Bouter</surname>
            <given-names>LM</given-names>
          </name>
        </person-group>
        <article-title>Development of AMSTAR: a measurement tool to assess the methodological quality of systematic reviews</article-title>
        <source>BMC Med Res Methodol</source>  
        <year>2007</year>  
        <month>02</month>  
        <day>15</day>  
        <volume>7</volume>  
        <fpage>10</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://bmcmedresmethodol.biomedcentral.com/articles/10.1186/1471-2288-7-10"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1186/1471-2288-7-10</pub-id>
        <pub-id pub-id-type="medline">17302989</pub-id>
        <pub-id pub-id-type="pii">1471-2288-7-10</pub-id>
        <pub-id pub-id-type="pmcid">PMC1810543</pub-id></nlm-citation>
      </ref>
      <ref id="ref51">
        <label>51</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Liberati</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Altman</surname>
            <given-names>DG</given-names>
          </name>
          <name name-style="western">
            <surname>Tetzlaff</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Mulrow</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Gøtzsche</surname>
            <given-names>PC</given-names>
          </name>
          <name name-style="western">
            <surname>Ioannidis</surname>
            <given-names>JP</given-names>
          </name>
          <name name-style="western">
            <surname>Clarke</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Devereaux</surname>
            <given-names>PJ</given-names>
          </name>
          <name name-style="western">
            <surname>Kleijnen</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Moher</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>The PRISMA statement for reporting systematic reviews and meta-analyses of studies that evaluate healthcare interventions: explanation and elaboration</article-title>
        <source>Br Med J</source>  
        <year>2009</year>  
        <month>07</month>  
        <day>21</day>  
        <volume>339</volume>  
        <fpage>b2700</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/19622552"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1136/bmj.b2700</pub-id>
        <pub-id pub-id-type="medline">19622552</pub-id>
        <pub-id pub-id-type="pmcid">PMC2714672</pub-id></nlm-citation>
      </ref>
      <ref id="ref52">
        <label>52</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Sánchez-Margallo</surname>
            <given-names>FM</given-names>
          </name>
          <name name-style="western">
            <surname>Sánchez-Margallo</surname>
            <given-names>JA</given-names>
          </name>
          <name name-style="western">
            <surname>Moyano-Cuevas</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Pérez</surname>
            <given-names>EM</given-names>
          </name>
          <name name-style="western">
            <surname>Maestre</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Use of natural user interfaces for image navigation during laparoscopic surgery: initial experience</article-title>
        <source>Minim Invasive Ther Allied Technol</source>  
        <year>2017</year>  
        <month>10</month>  
        <volume>26</volume>  
        <issue>5</issue>  
        <fpage>253</fpage>  
        <lpage>61</lpage>  
        <pub-id pub-id-type="doi">10.1080/13645706.2017.1304964</pub-id>
        <pub-id pub-id-type="medline">28349758</pub-id></nlm-citation>
      </ref>
      <ref id="ref53">
        <label>53</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Grange</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Terrence</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Fong</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Baur</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>M/ORIS: A medical/operating room interaction system</article-title>
        <source>Proceedings of the 6th international conference on Multimodal interfaces</source>  
        <year>2004</year>  
        <conf-name>ICMI'04</conf-name>
        <conf-date>October 13-15, 2004</conf-date>
        <conf-loc>State College, PA, USA</conf-loc>
        <fpage>159</fpage>  
        <lpage>66</lpage>  
        <pub-id pub-id-type="doi">10.1145/1027933.1027962</pub-id></nlm-citation>
      </ref>
      <ref id="ref54">
        <label>54</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bizzotto</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Costanzo</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Bizzotto</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Regis</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Sandri</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Magnan</surname>
            <given-names>B</given-names>
          </name>
        </person-group>
        <article-title>Leap motion gesture control with OsiriX in the operating room to control imaging: first experiences during live surgery</article-title>
        <source>Surg Innov</source>  
        <year>2014</year>  
        <month>12</month>  
        <volume>21</volume>  
        <issue>6</issue>  
        <fpage>655</fpage>  
        <lpage>6</lpage>  
        <pub-id pub-id-type="doi">10.1177/1553350614528384</pub-id>
        <pub-id pub-id-type="medline">24742500</pub-id>
        <pub-id pub-id-type="pii">1553350614528384</pub-id></nlm-citation>
      </ref>
      <ref id="ref55">
        <label>55</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bizzotto</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Costanzo</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Maluta</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Dall'Oca</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Lavini</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Sandri</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Preliminary experience with the use of leap motion gesture control to manage imaging in the operating room</article-title>
        <source>J Orthopaed Traumatol</source>  
        <year>2014</year>  
        <month>11</month>  
        <volume>15</volume>  
        <issue>Suppl 1</issue>  
        <fpage>19</fpage>  
        <lpage>20</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1007/s10195-014-0314-y"/>
        </comment> </nlm-citation>
      </ref>
      <ref id="ref56">
        <label>56</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Streba</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Gheonea</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Streba</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Sandulescu</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Saftoiu</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Gheone</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>Virtual Palpation Model – combining spiral CT and elastography data: a proof-of-concept study</article-title>
        <source>Gastroenterology</source>  
        <year>2014</year>  
        <volume>146</volume>  
        <issue>5</issue>  
        <fpage>344</fpage>  
        <lpage>5</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://linkinghub.elsevier.com/retrieve/pii/S0016508514612452"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/S0016-5085(13)63684-7</pub-id></nlm-citation>
      </ref>
      <ref id="ref57">
        <label>57</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Nouei</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Kamyad</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Soroush</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Ghazalbash</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>A comprehensive operating room information system using the Kinect sensors and RFID</article-title>
        <source>J Clin Monit Comput</source>  
        <year>2015</year>  
        <month>04</month>  
        <volume>29</volume>  
        <issue>2</issue>  
        <fpage>251</fpage>  
        <lpage>61</lpage>  
        <pub-id pub-id-type="doi">10.1007/s10877-014-9591-5</pub-id>
        <pub-id pub-id-type="medline">25017016</pub-id></nlm-citation>
      </ref>
      <ref id="ref58">
        <label>58</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hettig</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Saalfeld</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Luz</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Becker</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Skalej</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Hansen</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>Comparison of gesture and conventional interaction techniques for interventional neuroradiology</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2017</year>  
        <month>09</month>  
        <volume>12</volume>  
        <issue>9</issue>  
        <fpage>1643</fpage>  
        <lpage>53</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11548-017-1523-7</pub-id>
        <pub-id pub-id-type="medline">28120179</pub-id>
        <pub-id pub-id-type="pii">10.1007/s11548-017-1523-7</pub-id></nlm-citation>
      </ref>
      <ref id="ref59">
        <label>59</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Johnson</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>O'Hara</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Sellen</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Cousins</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Criminisi</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>Exploring the Potential for Touchless Interaction in Image-Guided Interventional Radiology</article-title>
        <year>2011</year>  
        <conf-name>CHI'11</conf-name>
        <conf-date>May 7-12, 2011</conf-date>
        <conf-loc>Vancouver, BC, Canada</conf-loc>
        <fpage>3323</fpage>  
        <lpage>32</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://www.microsoft.com/en-us/research/wp-content/uploads/2011/05/chi2011_paper188.pdf"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1145/1978942.1979436</pub-id></nlm-citation>
      </ref>
      <ref id="ref60">
        <label>60</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hötker</surname>
            <given-names>AM</given-names>
          </name>
          <name name-style="western">
            <surname>Pitton</surname>
            <given-names>MB</given-names>
          </name>
          <name name-style="western">
            <surname>Mildenberger</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Düber</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>Speech and motion control for interventional radiology: requirements and feasibility</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2013</year>  
        <month>11</month>  
        <volume>8</volume>  
        <issue>6</issue>  
        <fpage>997</fpage>  
        <lpage>1002</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11548-013-0841-7</pub-id>
        <pub-id pub-id-type="medline">23580026</pub-id></nlm-citation>
      </ref>
      <ref id="ref61">
        <label>61</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Tan</surname>
            <given-names>JH</given-names>
          </name>
          <name name-style="western">
            <surname>Chao</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Zawaideh</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Roberts</surname>
            <given-names>AC</given-names>
          </name>
          <name name-style="western">
            <surname>Kinney</surname>
            <given-names>TB</given-names>
          </name>
        </person-group>
        <article-title>Informatics in Radiology: developing a touchless user interface for intraoperative image control during interventional radiology procedures</article-title>
        <source>Radiographics</source>  
        <year>2013</year>  
        <volume>33</volume>  
        <issue>2</issue>  
        <fpage>E61</fpage>  
        <lpage>70</lpage>  
        <pub-id pub-id-type="doi">10.1148/rg.332125101</pub-id>
        <pub-id pub-id-type="medline">23264282</pub-id>
        <pub-id pub-id-type="pii">rg.332125101</pub-id></nlm-citation>
      </ref>
      <ref id="ref62">
        <label>62</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Iannessi</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Marcy</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Clatz</surname>
            <given-names>O</given-names>
          </name>
          <name name-style="western">
            <surname>Fillard</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Ayache</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>Touchless intra-operative display for interventional radiologist</article-title>
        <source>Diagn Interv Imaging</source>  
        <year>2014</year>  
        <month>03</month>  
        <volume>95</volume>  
        <issue>3</issue>  
        <fpage>333</fpage>  
        <lpage>7</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2211-5684(13)00304-5"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.diii.2013.09.007</pub-id>
        <pub-id pub-id-type="medline">24176864</pub-id>
        <pub-id pub-id-type="pii">S2211-5684(13)00304-5</pub-id></nlm-citation>
      </ref>
      <ref id="ref63">
        <label>63</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bercu</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Patil</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Patel</surname>
            <given-names>RS</given-names>
          </name>
          <name name-style="western">
            <surname>Kim</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Nowakowski</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Lookstein</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Abstracts of the BSIR 2013 Annual Scientific Meeting, November 13-15, 2013, Manchester, England</article-title>
        <source>Cardiovasc Intervent Radiol</source>  
        <year>2014</year>  
        <month>01</month>  
        <volume>37</volume>  
        <issue>Suppl 1</issue>  
        <fpage>1</fpage>  
        <lpage>82</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1007/s00270-013-0835-4"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1007/s00270-013-0835-4</pub-id>
        <pub-id pub-id-type="medline">24425448</pub-id></nlm-citation>
      </ref>
      <ref id="ref64">
        <label>64</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bercu</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Patil</surname>
            <given-names>VV</given-names>
          </name>
          <name name-style="western">
            <surname>Patel</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Kim</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Nowakowski</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Lookstein</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Fischman</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Use of hands free gesture-based imaging control for vessel identification during hepatic transarterial chemoembolization and selective internal radiotherapy procedures</article-title>
        <source>J Vasc Interv Radiol</source>  
        <year>2015</year>  
        <month>02</month>  
        <volume>26</volume>  
        <issue>2</issue>  
        <fpage>S186</fpage>  
        <lpage>7</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.jvir.2014.12.499"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.jvir.2014.12.499</pub-id></nlm-citation>
      </ref>
      <ref id="ref65">
        <label>65</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Mentis</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>O'Hara</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Sellen</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Rikin</surname>
            <given-names>TR</given-names>
          </name>
        </person-group>
        <article-title>Interaction proxemics and image use in neurosurgery</article-title>
        <source>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</source>  
        <year>2012</year>  
        <conf-name>CHI'12</conf-name>
        <conf-date>May 5-10, 2012</conf-date>
        <conf-loc>New York, NY, USA</conf-loc>
        <publisher-loc>New York, NY, USA</publisher-loc>
        <publisher-name>ACM Conference on Computer-Human Interaction</publisher-name>
        <fpage>927</fpage>  
        <lpage>36</lpage>  
        <pub-id pub-id-type="doi">10.1145/2207676.2208536</pub-id></nlm-citation>
      </ref>
      <ref id="ref66">
        <label>66</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wright</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>de Ribaupierre</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Eagleson</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Design and evaluation of an augmented reality simulator using leap motion</article-title>
        <source>Healthc Technol Lett</source>  
        <year>2017</year>  
        <month>10</month>  
        <volume>4</volume>  
        <issue>5</issue>  
        <fpage>210</fpage>  
        <lpage>5</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/29184667"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1049/htl.2017.0070</pub-id>
        <pub-id pub-id-type="medline">29184667</pub-id>
        <pub-id pub-id-type="pii">HTL.2017.0070</pub-id>
        <pub-id pub-id-type="pmcid">PMC5683193</pub-id></nlm-citation>
      </ref>
      <ref id="ref67">
        <label>67</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Yoshimitsu</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Muragaki</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Maruyama</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Saito</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Suzuki</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Ikuta</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Clinical trials of the non-touch intraoperative image controllable interface system using KINECT(TM)</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2012</year>  
        <volume>7</volume>  
        <issue>Suppl 1</issue>  
        <fpage>S209</fpage>  
        <lpage>10</lpage> </nlm-citation>
      </ref>
      <ref id="ref68">
        <label>68</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Yoshimitsu</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Muragaki</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Maruyama</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Yamato</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Iseki</surname>
            <given-names>H</given-names>
          </name>
        </person-group>
        <article-title>Development and initial clinical testing of "OPECT": an innovative device for fully intangible control of the intraoperative image-displaying monitor by the surgeon</article-title>
        <source>Neurosurgery</source>  
        <year>2014</year>  
        <month>03</month>  
        <volume>10 Suppl 1</volume>  
        <fpage>46</fpage>  
        <lpage>50; discussion 50</lpage>  
        <pub-id pub-id-type="doi">10.1227/NEU.0000000000000214</pub-id>
        <pub-id pub-id-type="medline">24141478</pub-id></nlm-citation>
      </ref>
      <ref id="ref69">
        <label>69</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>di Tommaso</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Aubry</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Godard</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Katranji</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Pauchot</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>A new human machine interface in neurosurgery: The Leap Motion(®). Technical note regarding a new touchless interface</article-title>
        <source>Neurochirurgie</source>  
        <year>2016</year>  
        <month>06</month>  
        <volume>62</volume>  
        <issue>3</issue>  
        <fpage>178</fpage>  
        <lpage>81</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.neuchi.2016.01.006</pub-id>
        <pub-id pub-id-type="medline">27234915</pub-id>
        <pub-id pub-id-type="pii">S0028-3770(16)30011-X</pub-id></nlm-citation>
      </ref>
      <ref id="ref70">
        <label>70</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Xu</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Zheng</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Yao</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Sun</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Xu</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>X</given-names>
          </name>
        </person-group>
        <article-title>A low-cost multimodal head-mounted display system for neuroendoscopic surgery</article-title>
        <source>Brain Behav</source>  
        <year>2018</year>  
        <month>12</month>  
        <volume>8</volume>  
        <issue>1</issue>  
        <fpage>e00891</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1002/brb3.891"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1002/brb3.891</pub-id>
        <pub-id pub-id-type="medline">29568688</pub-id>
        <pub-id pub-id-type="pii">BRB3891</pub-id>
        <pub-id pub-id-type="pmcid">PMC5853619</pub-id></nlm-citation>
      </ref>
      <ref id="ref71">
        <label>71</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Henseler</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Kuznetsova</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Vogt</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Rosenhahn</surname>
            <given-names>B</given-names>
          </name>
        </person-group>
        <article-title>Validation of the Kinect device as a new portable imaging system for three-dimensional breast assessment</article-title>
        <source>J Plast Reconstr Aesthet Surg</source>  
        <year>2014</year>  
        <month>04</month>  
        <volume>67</volume>  
        <issue>4</issue>  
        <fpage>483</fpage>  
        <lpage>8</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.bjps.2013.12.025</pub-id>
        <pub-id pub-id-type="medline">24513562</pub-id>
        <pub-id pub-id-type="pii">S1748-6815(13)00689-X</pub-id></nlm-citation>
      </ref>
      <ref id="ref72">
        <label>72</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wheat</surname>
            <given-names>JS</given-names>
          </name>
          <name name-style="western">
            <surname>Choppin</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Goyal</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Development and assessment of a Microsoft Kinect based system for imaging the breast in three dimensions</article-title>
        <source>Med Eng Phys</source>  
        <year>2014</year>  
        <month>06</month>  
        <volume>36</volume>  
        <issue>6</issue>  
        <fpage>732</fpage>  
        <lpage>8</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.medengphy.2013.12.018</pub-id>
        <pub-id pub-id-type="medline">24507690</pub-id>
        <pub-id pub-id-type="pii">S1350-4533(13)00297-X</pub-id></nlm-citation>
      </ref>
      <ref id="ref73">
        <label>73</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Pöhlmann</surname>
            <given-names>ST</given-names>
          </name>
          <name name-style="western">
            <surname>Harkness</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Taylor</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Gandhi</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Astley</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Preoperative implant selection for unilateral breast reconstruction using 3D imaging with the Microsoft Kinect sensor</article-title>
        <source>J Plast Reconstr Aesthet Surg</source>  
        <year>2017</year>  
        <month>08</month>  
        <volume>70</volume>  
        <issue>8</issue>  
        <fpage>1059</fpage>  
        <lpage>67</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.bjps.2017.04.005</pub-id>
        <pub-id pub-id-type="medline">28595842</pub-id>
        <pub-id pub-id-type="pii">S1748-6815(17)30158-4</pub-id></nlm-citation>
      </ref>
      <ref id="ref74">
        <label>74</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Klumb</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Dubois-Ferriere</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Roduit</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Barea</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Strgar</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Ahmed</surname>
            <given-names>K</given-names>
          </name>
        </person-group>
        <article-title>CARS 2017-Computer Assisted Radiology and Surgery Proceedings of the 31st International Congress and Exhibition Barcelona, Spain, June 20-24, 2017</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2017</year>  
        <month>06</month>  
        <volume>12</volume>  
        <issue>Suppl 1</issue>  
        <fpage>1</fpage>  
        <lpage>286</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1007/s11548-017-1588-3"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1007/s11548-017-1588-3</pub-id>
        <pub-id pub-id-type="medline">28527024</pub-id>
        <pub-id pub-id-type="pii">10.1007/s11548-017-1588-3</pub-id></nlm-citation>
      </ref>
      <ref id="ref75">
        <label>75</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Pauly</surname>
            <given-names>O</given-names>
          </name>
          <name name-style="western">
            <surname>Diotte</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Fallavollita</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Weidert</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Euler</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Navab</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>Machine learning-based augmented reality for improved surgical scene understanding</article-title>
        <source>Comput Med Imaging Graph</source>  
        <year>2015</year>  
        <month>04</month>  
        <volume>41</volume>  
        <fpage>55</fpage>  
        <lpage>60</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.compmedimag.2014.06.007</pub-id>
        <pub-id pub-id-type="medline">24998759</pub-id>
        <pub-id pub-id-type="pii">S0895-6111(14)00100-1</pub-id></nlm-citation>
      </ref>
      <ref id="ref76">
        <label>76</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Jacob</surname>
            <given-names>MG</given-names>
          </name>
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>JP</given-names>
          </name>
        </person-group>
        <article-title>Context-based hand gesture recognition for the operating room</article-title>
        <source>Pattern Recognit Lett</source>  
        <year>2014</year>  
        <month>01</month>
        <volume>36</volume>  
        <fpage>196</fpage>  
        <lpage>203</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.patrec.2013.05.024</pub-id></nlm-citation>
      </ref>
      <ref id="ref77">
        <label>77</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hughes</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Nestorov</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Healy</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Sheehy</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>O'Hare</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>Comparing the utility and usability of the Microsoft Kinect and Leap Motion sensor devices in the context of their application for gesture control of biomedical images</article-title>
        <year>2015</year>  
        <conf-name>ECR 2015</conf-name>
        <conf-date>March 4–8, 2015</conf-date>
        <conf-loc>Vienna</conf-loc>
        <pub-id pub-id-type="doi">10.1594/ecr2015/B-1192</pub-id></nlm-citation>
      </ref>
      <ref id="ref78">
        <label>78</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>O’Hara</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Gonzalez</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Penney</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Sellen</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Corish</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Mentis</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Varnavas</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Criminisi</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Rouncefield</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Dastur</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Carrell</surname>
            <given-names>T</given-names>
          </name>
        </person-group>
        <article-title>Interactional order and constructed ways of seeing with touchless imaging systems in surgery</article-title>
        <source>Comput Supported Coop Work</source>  
        <year>2014</year>  
        <month>05</month>  
        <day>7</day>  
        <volume>23</volume>  
        <issue>3</issue>  
        <fpage>299</fpage>  
        <lpage>337</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1007/s10606-014-9203-4"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1007/s10606-014-9203-4</pub-id></nlm-citation>
      </ref>
      <ref id="ref79">
        <label>79</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kirmizibayrak</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Radeva</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Wakid</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Philbeck</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Sibert</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Hahn</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Evaluation of gesture based interfaces for medical volume visualization tasks</article-title>
        <source>Int J Virtual Real</source>  
        <year>2012</year>  
        <fpage>1</fpage>  
        <lpage>13</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://ro.uow.edu.au/sspapers/1105/"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1145/2087756.2087764</pub-id></nlm-citation>
      </ref>
      <ref id="ref80">
        <label>80</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wipfli</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Dubois-Ferrière</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Budry</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Hoffmeyer</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Lovis</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>Gesture-controlled image management for operating room: a randomized crossover study to compare interaction using gestures, mouse, and third person relaying</article-title>
        <source>PLoS One</source>  
        <year>2016</year>  
        <volume>11</volume>  
        <issue>4</issue>  
        <fpage>e0153596</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://dx.plos.org/10.1371/journal.pone.0153596"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1371/journal.pone.0153596</pub-id>
        <pub-id pub-id-type="medline">27082758</pub-id>
        <pub-id pub-id-type="pii">PONE-D-15-41785</pub-id>
        <pub-id pub-id-type="pmcid">PMC4833285</pub-id></nlm-citation>
      </ref>
      <ref id="ref81">
        <label>81</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ogura</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Sato</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ishida</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Hayashi</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Doi</surname>
            <given-names>K</given-names>
          </name>
        </person-group>
        <article-title>Development of a novel method for manipulation of angiographic images by use of a motion sensor in operating rooms</article-title>
        <source>Radiol Phys Technol</source>  
        <year>2014</year>  
        <month>07</month>  
        <volume>7</volume>  
        <issue>2</issue>  
        <fpage>228</fpage>  
        <lpage>34</lpage>  
        <pub-id pub-id-type="doi">10.1007/s12194-014-0259-0</pub-id>
        <pub-id pub-id-type="medline">24609904</pub-id></nlm-citation>
      </ref>
      <ref id="ref82">
        <label>82</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Stern</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Edan</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Gillam</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Feied</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Smith</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <person-group person-group-type="editor">
          <name name-style="western">
            <surname>Tiwari</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Roy</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Knowles</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Avineri</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Dahal</surname>
            <given-names>K</given-names>
          </name>
        </person-group>
        <article-title>A Real-Time Hand Gesture Interface for Medical Visualization Applications</article-title>
        <source>Applications of Soft Computing, Volume 36 of Advances in Intelligent and Soft Computing</source>  
        <year>2006</year>  
        <publisher-loc>Berlin Heidelberg</publisher-loc>
        <publisher-name>Springer</publisher-name>
        <fpage>153</fpage> </nlm-citation>
      </ref>
      <ref id="ref83">
        <label>83</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Jacob</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Cange</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Packer</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <person-group person-group-type="editor">
          <name name-style="western">
            <surname>Alvarez</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Mejail</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Gomez</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Jacobo</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Intention, context and gesture recognition for sterile MRI navigation in the operating room</article-title>
        <source>Progress in Pattern Recognition, Image Analysis, Computer Vision, and Applications, Volume 7441 of Lecture Notes in Computer Science</source>  
        <year>2012</year>  
        <publisher-loc>Berlin Heidelberg</publisher-loc>
        <publisher-name>Springer</publisher-name>
        <fpage>220</fpage>  
        <lpage>7</lpage> </nlm-citation>
      </ref>
      <ref id="ref84">
        <label>84</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Frame</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>A novel system for hands free manipulation of digital X-rays in a sterile environment using consumer electronics and software</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2012</year>  
        <volume>7</volume>  
        <issue>Supplement 1</issue>  
        <fpage>S208</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.caos-international.org/caos-2012/abstracts/024.pdf"/>
        </comment> </nlm-citation>
      </ref>
      <ref id="ref85">
        <label>85</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ebert</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Hatch</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Thali</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ross</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Invisible touch—Control of a DICOM viewer with finger gestures using the Kinect depth camera</article-title>
        <source>J Forensic Radiol Imaging</source>  
        <year>2013</year>  
        <month>01</month>  
        <volume>1</volume>  
        <issue>1</issue>  
        <fpage>10</fpage>  
        <lpage>4</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.jofri.2012.11.006"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.jofri.2012.11.006</pub-id></nlm-citation>
      </ref>
      <ref id="ref86">
        <label>86</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ogura</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Sato</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ishida</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Hayashi</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Doi</surname>
            <given-names>K</given-names>
          </name>
        </person-group>
        <article-title>Development of a novel method for manipulation of angiographic images by use of a motion sensor in operating rooms</article-title>
        <source>Radiol Phys Technol</source>  
        <year>2014</year>  
        <month>07</month>  
        <volume>7</volume>  
        <issue>2</issue>  
        <fpage>228</fpage>  
        <lpage>34</lpage>  
        <pub-id pub-id-type="doi">10.1007/s12194-014-0259-0</pub-id>
        <pub-id pub-id-type="medline">24609904</pub-id></nlm-citation>
      </ref>
      <ref id="ref87">
        <label>87</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ebert</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Flach</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Thali</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ross</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Out of touch – a plugin for controlling OsiriX with gestures using the leap controller</article-title>
        <source>J Forensic Radiol Imaging</source>  
        <year>2014</year>  
        <month>07</month>  
        <volume>2</volume>  
        <issue>3</issue>  
        <fpage>126</fpage>  
        <lpage>8</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.jofri.2014.05.006"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.jofri.2014.05.006</pub-id></nlm-citation>
      </ref>
      <ref id="ref88">
        <label>88</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Rossol</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Cheng</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Shen</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Basu</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Touchfree medical interfaces</article-title>
        <source>Conf Proc IEEE Eng Med Biol Soc</source>  
        <year>2014</year>  
        <volume>2014</volume>  
        <fpage>6597</fpage>  
        <lpage>600</lpage>  
        <pub-id pub-id-type="doi">10.1109/EMBC.2014.6945140</pub-id>
        <pub-id pub-id-type="medline">25571508</pub-id></nlm-citation>
      </ref>
      <ref id="ref89">
        <label>89</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Iannessi</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Marcy</surname>
            <given-names>PY</given-names>
          </name>
          <name name-style="western">
            <surname>Clatz</surname>
            <given-names>O</given-names>
          </name>
          <name name-style="western">
            <surname>Ayache</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Fillard</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <article-title>Touchless user interface for intraoperative image control: almost there</article-title>
        <source>Radiographics</source>  
        <year>2014</year>  
        <volume>34</volume>  
        <issue>4</issue>  
        <fpage>1142</fpage>  
        <lpage>4</lpage>  
        <pub-id pub-id-type="doi">10.1148/rg.344135158</pub-id>
        <pub-id pub-id-type="medline">25019447</pub-id></nlm-citation>
      </ref>
      <ref id="ref90">
        <label>90</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Widmer</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Schaer</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Markonis</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Müller</surname>
            <given-names>H</given-names>
          </name>
        </person-group>
        <article-title>Gesture interaction for content-based medical image retrieval</article-title>
        <year>2014</year>  
        <conf-name>ICMR'14</conf-name>
        <conf-date>2014</conf-date>
        <conf-loc>Glasgow, United Kingdom</conf-loc>
        <publisher-loc>New York, NY, USA</publisher-loc>
        <publisher-name>ACM</publisher-name>
        <pub-id pub-id-type="doi">10.1145/2578726.2578804</pub-id></nlm-citation>
      </ref>
      <ref id="ref91">
        <label>91</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ogura</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Sato</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Kadowaki</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Yasumoto</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Okajima</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Tsutsumi</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Development of a new method for manipulation of dental images using a motion sensor in dentistry</article-title>
        <year>2015</year>  
        <conf-name>ECR 2015</conf-name>
        <conf-date>March 4-8, 2015</conf-date>
        <conf-loc>Vienna, Austria</conf-loc>
        <pub-id pub-id-type="doi">10.1594/ecr2015/C-0251</pub-id></nlm-citation>
      </ref>
      <ref id="ref92">
        <label>92</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ogura</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Sato</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ishida</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Hayashi</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Doi</surname>
            <given-names>K</given-names>
          </name>
        </person-group>
        <article-title>Development of a novel method for manipulation of angiographic images by use of a motion sensor in operating rooms</article-title>
        <source>Radiol Phys Technol</source>  
        <year>2014</year>  
        <month>07</month>  
        <volume>7</volume>  
        <issue>2</issue>  
        <fpage>228</fpage>  
        <lpage>34</lpage>  
        <pub-id pub-id-type="doi">10.1007/s12194-014-0259-0</pub-id>
        <pub-id pub-id-type="medline">24609904</pub-id></nlm-citation>
      </ref>
      <ref id="ref93">
        <label>93</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Mewes</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Saalfeld</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Riabikin</surname>
            <given-names>O</given-names>
          </name>
          <name name-style="western">
            <surname>Skalej</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Hansen</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <article-title>A gesture-controlled projection display for CT-guided interventions</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2016</year>  
        <month>01</month>  
        <volume>11</volume>  
        <issue>1</issue>  
        <fpage>157</fpage>  
        <lpage>64</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11548-015-1215-0</pub-id>
        <pub-id pub-id-type="medline">25958060</pub-id>
        <pub-id pub-id-type="pii">10.1007/s11548-015-1215-0</pub-id></nlm-citation>
      </ref>
      <ref id="ref94">
        <label>94</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Nainggolan</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Siregar</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Fahmi</surname>
            <given-names>F</given-names>
          </name>
        </person-group>
        <article-title>Anatomy learning system on human skeleton using Leap Motion Controller</article-title>
        <year>2016</year>  
        <month>08</month>  
        <day>15</day>  
        <conf-name>2016 3rd International Conference on Computer and Information Sciences (ICCOINS)</conf-name>
        <conf-date>August 15-17, 2016</conf-date>
        <conf-loc>Kuala Lumpur, Malaysia</conf-loc>
        <publisher-name>IEEE</publisher-name>
        <fpage>2016</fpage>  
        <lpage>3</lpage>  
        <pub-id pub-id-type="doi">10.1109/ICCOINS.2016.7783260</pub-id></nlm-citation>
      </ref>
      <ref id="ref95">
        <label>95</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Virag</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Stoicu-Tivadar</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Crisan-Vida</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>Gesture-based interaction in medical interfaces</article-title>
        <year>2016</year>  
        <month>07</month>  
        <day>11</day>  
        <conf-name>2016 IEEE 11th International Symposium on Applied Computational Intelligence and Informatics (SACI)</conf-name>
        <conf-date>May 12-14, 2016</conf-date>
        <conf-loc>Timisoara, Romania</conf-loc>
        <pub-id pub-id-type="doi">10.1109/SACI.2016.7507339</pub-id></nlm-citation>
      </ref>
      <ref id="ref96">
        <label>96</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Juhnke</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Berron</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Philip</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Williams</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Holub</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Winer</surname>
            <given-names>E</given-names>
          </name>
        </person-group>
        <article-title>Comparing the Microsoft Kinect to a traditional mouse for adjusting the viewed tissue densities of three-dimensional anatomical structures</article-title>
        <year>2013</year>  
        <conf-name>Medical Imaging 2013: Image Perception, Observer Performance, and Technology Assessment</conf-name>
        <conf-date>2013</conf-date>
        <conf-loc>Baltimore, Maryland, USA</conf-loc>
        <fpage>86731</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1117/12.2006994"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1117/12.2006994</pub-id></nlm-citation>
      </ref>
      <ref id="ref97">
        <label>97</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Pulijala</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Ma</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Pears</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Peebles</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Ayoub</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>An innovative virtual reality training tool for orthognathic surgery</article-title>
        <source>Int J Oral Maxillofac Surg</source>  
        <year>2018</year>  
        <month>09</month>  
        <volume>47</volume>  
        <issue>9</issue>  
        <fpage>1199</fpage>  
        <lpage>205</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.ijom.2018.01.005</pub-id>
        <pub-id pub-id-type="medline">29398172</pub-id>
        <pub-id pub-id-type="pii">S0901-5027(18)30005-5</pub-id></nlm-citation>
      </ref>
      <ref id="ref98">
        <label>98</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Pulijala</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Ma</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Pears</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Peebles</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Ayoub</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Effectiveness of immersive virtual reality in surgical training-a randomized control trial</article-title>
        <source>J Oral Maxillofac Surg</source>  
        <year>2018</year>  
        <month>05</month>  
        <volume>76</volume>  
        <issue>5</issue>  
        <fpage>1065</fpage>  
        <lpage>72</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.joms.2017.10.002</pub-id>
        <pub-id pub-id-type="medline">29104028</pub-id>
        <pub-id pub-id-type="pii">S0278-2391(17)31250-8</pub-id></nlm-citation>
      </ref>
      <ref id="ref99">
        <label>99</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Placitelli</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Gallo</surname>
            <given-names>L</given-names>
          </name>
        </person-group>
        <article-title>3D point cloud sensors for low-cost medical in-situ visualization</article-title>
        <year>2011</year>  
        <conf-name>2011 IEEE International Conference on Bioinformatics and Biomedicine Workshops (BIBMW)</conf-name>
        <conf-date>November 12-15, 2011</conf-date>
        <conf-loc>Atlanta, GA, USA</conf-loc>
        <publisher-loc>USA</publisher-loc>
        <publisher-name>IEEE</publisher-name>
        <fpage>596</fpage>  
        <pub-id pub-id-type="doi">10.1109/BIBMW.2011.6112435</pub-id></nlm-citation>
      </ref>
      <ref id="ref100">
        <label>100</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Samosky</surname>
            <given-names>JT</given-names>
          </name>
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Nelson</surname>
            <given-names>DA</given-names>
          </name>
          <name name-style="western">
            <surname>Bregman</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Hosmer</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Weaver</surname>
            <given-names>RA</given-names>
          </name>
        </person-group>
        <article-title>BodyWindows: enhancing a mannequin with projective augmented reality for exploring anatomy, physiology and medical procedures</article-title>
        <source>Stud Health Technol Inform</source>  
        <year>2012</year>  
        <volume>173</volume>  
        <fpage>433</fpage>  
        <lpage>9</lpage>  
        <pub-id pub-id-type="medline">22357032</pub-id></nlm-citation>
      </ref>
      <ref id="ref101">
        <label>101</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Blum</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Kleeberger</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Bichlmeier</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Navab</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>Mirracle: an augmented reality magic mirror system for anatomy education</article-title>
        <year>2012</year>  
        <conf-name>2012 IEEE Virtual Reality Workshops (VRW)</conf-name>
        <conf-date>March 4-8, 2012</conf-date>
        <conf-loc>Costa Mesa, CA, USA</conf-loc>
        <fpage>433</fpage>  
        <lpage>9</lpage>  
        <pub-id pub-id-type="doi">10.1109/VR.2012.6180909</pub-id></nlm-citation>
      </ref>
      <ref id="ref102">
        <label>102</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Dargar</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Nunno</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Sankaranarayanan</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>De</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Microsoft Kinect based head tracking for Life Size Collaborative Surgical Simulation Environments (LS-CollaSSLE)</article-title>
        <source>Stud Health Technol Inform</source>  
        <year>2013</year>  
        <volume>184</volume>  
        <fpage>109</fpage>  
        <lpage>13</lpage>  
        <pub-id pub-id-type="doi">10.3233/978-1-61499-209-7-109</pub-id>
        <pub-id pub-id-type="medline">23400140</pub-id></nlm-citation>
      </ref>
      <ref id="ref103">
        <label>103</label>
        <nlm-citation citation-type="web">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Juhnke</surname>
            <given-names>B</given-names>
          </name>
        </person-group>
        <source>Iowa State University</source>  
        <year>2013</year>  
        <comment>Evaluating the Microsoft Kinect compared to the mouse as an effective interaction device for medical imaging manipulations 
        <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://lib.dr.iastate.edu/cgi/viewcontent.cgi?article=4362&amp;context=etd">https://lib.dr.iastate.edu/cgi/viewcontent.cgi?article=4362&amp;context=etd</ext-link></comment> </nlm-citation>
      </ref>
      <ref id="ref104">
        <label>104</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Guo</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Lopez</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Steiner</surname>
            <given-names>KV</given-names>
          </name>
          <name name-style="western">
            <surname>Barner</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Bauer</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>A portable immersive surgery training system using RGB-D sensors</article-title>
        <source>Stud Health Technol Inform</source>  
        <year>2013</year>  
        <volume>184</volume>  
        <fpage>161</fpage>  
        <lpage>7</lpage>  
        <pub-id pub-id-type="doi">10.3233/978-1-61499-209-7-161</pub-id>
        <pub-id pub-id-type="medline">23400150</pub-id></nlm-citation>
      </ref>
      <ref id="ref105">
        <label>105</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Yang</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Guo</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Steiner</surname>
            <given-names>KV</given-names>
          </name>
          <name name-style="western">
            <surname>Barner</surname>
            <given-names>KE</given-names>
          </name>
          <name name-style="western">
            <surname>Bauer</surname>
            <given-names>TL</given-names>
          </name>
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>An immersive surgery training system with live streaming capability</article-title>
        <source>Stud Health Technol Inform</source>  
        <year>2014</year>  
        <volume>196</volume>  
        <fpage>479</fpage>  
        <lpage>85</lpage>  
        <pub-id pub-id-type="doi">10.3233/978-1-61499-375-9-479</pub-id>
        <pub-id pub-id-type="medline">24732560</pub-id></nlm-citation>
      </ref>
      <ref id="ref106">
        <label>106</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hochman</surname>
            <given-names>JB</given-names>
          </name>
          <name name-style="western">
            <surname>Unger</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Kraut</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Pisa</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Hombach-Klonisch</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Gesture-controlled interactive three dimensional anatomy: a novel teaching tool in head and neck surgery</article-title>
        <source>J Otolaryngol Head Neck Surg</source>  
        <year>2014</year>  
        <volume>43</volume>  
        <fpage>38</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://journalotohns.biomedcentral.com/articles/10.1186/s40463-014-0038-2"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1186/s40463-014-0038-2</pub-id>
        <pub-id pub-id-type="medline">25286966</pub-id>
        <pub-id pub-id-type="pii">s40463-014-0038-2</pub-id>
        <pub-id pub-id-type="pmcid">PMC4193987</pub-id></nlm-citation>
      </ref>
      <ref id="ref107">
        <label>107</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kocev</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Ritter</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Linsen</surname>
            <given-names>L</given-names>
          </name>
        </person-group>
        <article-title>Projector-based surgeon-computer interaction on deformable surfaces</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2014</year>  
        <month>03</month>  
        <volume>9</volume>  
        <issue>2</issue>  
        <fpage>301</fpage>  
        <lpage>12</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11548-013-0928-1</pub-id>
        <pub-id pub-id-type="medline">23888316</pub-id></nlm-citation>
      </ref>
      <ref id="ref108">
        <label>108</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Alvarez-Lopez</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Maina</surname>
            <given-names>MF</given-names>
          </name>
          <name name-style="western">
            <surname>Saigí-Rubió</surname>
            <given-names>F</given-names>
          </name>
        </person-group>
        <article-title>Natural user interfaces: is it a solution to accomplish ubiquitous training in minimally invasive surgery?</article-title>
        <source>Surg Innov</source>  
        <year>2016</year>  
        <month>08</month>  
        <volume>23</volume>  
        <issue>4</issue>  
        <fpage>429</fpage>  
        <lpage>30</lpage>  
        <pub-id pub-id-type="doi">10.1177/1553350616639145</pub-id>
        <pub-id pub-id-type="medline">27009688</pub-id>
        <pub-id pub-id-type="pii">1553350616639145</pub-id></nlm-citation>
      </ref>
      <ref id="ref109">
        <label>109</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Juanes</surname>
            <given-names>JA</given-names>
          </name>
          <name name-style="western">
            <surname>Gómez</surname>
            <given-names>JJ</given-names>
          </name>
          <name name-style="western">
            <surname>Peguero</surname>
            <given-names>PD</given-names>
          </name>
          <name name-style="western">
            <surname>Ruisoto</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <article-title>Digital environment for movement control in surgical skill training</article-title>
        <source>J Med Syst</source>  
        <year>2016</year>  
        <month>06</month>  
        <volume>40</volume>  
        <issue>6</issue>  
        <fpage>133</fpage>  
        <pub-id pub-id-type="doi">10.1007/s10916-016-0495-4</pub-id>
        <pub-id pub-id-type="medline">27091754</pub-id>
        <pub-id pub-id-type="pii">10.1007/s10916-016-0495-4</pub-id></nlm-citation>
      </ref>
      <ref id="ref110">
        <label>110</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Svendsen</surname>
            <given-names>MB</given-names>
          </name>
          <name name-style="western">
            <surname>Preisler</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Hillingsoe</surname>
            <given-names>JG</given-names>
          </name>
          <name name-style="western">
            <surname>Svendsen</surname>
            <given-names>LB</given-names>
          </name>
          <name name-style="western">
            <surname>Konge</surname>
            <given-names>L</given-names>
          </name>
        </person-group>
        <article-title>Using motion capture to assess colonoscopy experience level</article-title>
        <source>World J Gastrointest Endosc</source>  
        <year>2014</year>  
        <month>05</month>  
        <day>16</day>  
        <volume>6</volume>  
        <issue>5</issue>  
        <fpage>193</fpage>  
        <lpage>9</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.wjgnet.com/1948-5190/full/v6/i5/193.htm"/>
        </comment>  
        <pub-id pub-id-type="doi">10.4253/wjge.v6.i5.193</pub-id>
        <pub-id pub-id-type="medline">24891932</pub-id>
        <pub-id pub-id-type="pmcid">PMC4024492</pub-id></nlm-citation>
      </ref>
      <ref id="ref111">
        <label>111</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Colella</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Svendsen</surname>
            <given-names>MB</given-names>
          </name>
          <name name-style="western">
            <surname>Konge</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Svendsen</surname>
            <given-names>LB</given-names>
          </name>
          <name name-style="western">
            <surname>Sivapalan</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Clementsen</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <article-title>Assessment of competence in simulated flexible bronchoscopy using motion analysis</article-title>
        <source>Respiration</source>  
        <year>2015</year>  
        <volume>89</volume>  
        <issue>2</issue>  
        <fpage>155</fpage>  
        <lpage>61</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://www.karger.com?DOI=10.1159/000369471"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1159/000369471</pub-id>
        <pub-id pub-id-type="medline">25591730</pub-id>
        <pub-id pub-id-type="pii">000369471</pub-id></nlm-citation>
      </ref>
      <ref id="ref112">
        <label>112</label>
        <nlm-citation citation-type="web">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Coles</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Cao</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Dumas</surname>
            <given-names>C</given-names>
          </name>
        </person-group>
        <source>SAGES</source>  
        <year>2014</year>  
        <access-date>2019-04-02</access-date>
        <comment>ETrack: An affordable Ergonomic assessment tool for surgical settings 
        <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.sages.org/meetings/annual-meeting/abstracts-archive/etrack-an-affordable-ergonomic-assessment-tool-for-surgical-settings/">http://www.sages.org/meetings/annual-meeting/abstracts-archive/etrack-an-affordable-ergonomic-assessment-tool-for-surgical-settings/</ext-link>
        <ext-link ext-link-type="webcite" xlink:href="77KXM9p9R"/></comment> </nlm-citation>
      </ref>
      <ref id="ref113">
        <label>113</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kim</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Kim</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Selle</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Shademan</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Krieger</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Experimental evaluation of contact-less hand tracking systems for tele-operation of surgical tasks</article-title>
        <year>2014</year>  
        <month>05</month>  
        <day>31</day>  
        <conf-name>2014 IEEE International Conference on Robotics and Automation (ICRA)</conf-name>
        <conf-date>May 31-June 7, 2014</conf-date>
        <conf-loc>Hong Kong, China</conf-loc>
        <fpage>2014</fpage>  
        <pub-id pub-id-type="doi">10.1109/ICRA.2014.6907364</pub-id></nlm-citation>
      </ref>
      <ref id="ref114">
        <label>114</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Beyl</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Schreiter</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Nicolai</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Raczkowsky</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Wörn</surname>
            <given-names>H</given-names>
          </name>
        </person-group>
        <article-title>3D perception technologies for surgical operating theatres</article-title>
        <source>Stud Health Technol Inform</source>  
        <year>2016</year>  
        <volume>220</volume>  
        <fpage>45</fpage>  
        <lpage>50</lpage>  
        <pub-id pub-id-type="medline">27046552</pub-id></nlm-citation>
      </ref>
      <ref id="ref115">
        <label>115</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Jacob</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Li</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Akingba</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>JP</given-names>
          </name>
        </person-group>
        <article-title>Gestonurse: a robotic surgical nurse for handling surgical instruments in the operating room</article-title>
        <source>J Robot Surg</source>  
        <year>2012</year>  
        <month>03</month>  
        <volume>6</volume>  
        <issue>1</issue>  
        <fpage>53</fpage>  
        <lpage>63</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11701-011-0325-0</pub-id>
        <pub-id pub-id-type="medline">27637980</pub-id>
        <pub-id pub-id-type="pii">10.1007/s11701-011-0325-0</pub-id></nlm-citation>
      </ref>
      <ref id="ref116">
        <label>116</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Despinoy</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Sánchez</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Zemiti</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Jannin</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Poignet</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <person-group person-group-type="editor">
          <name name-style="western">
            <surname>Stoyanov</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>Comparative assessment of a novel optical human-machine interface for laparoscopic telesurgery</article-title>
        <source>Information Processing in Computer-Assisted Interventions</source>  
        <year>2014</year>  
        <publisher-loc>Cham</publisher-loc>
        <publisher-name>Springer</publisher-name>
        <fpage>21</fpage> </nlm-citation>
      </ref>
      <ref id="ref117">
        <label>117</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Vargas</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Vivas</surname>
            <given-names>O</given-names>
          </name>
        </person-group>
        <article-title>Gesture recognition system for surgical robot's manipulation</article-title>
        <year>2014</year>  
        <conf-name>2014 XIX Symposium on Image, Signal Processing and Artificial Vision</conf-name>
        <conf-date>September 17-19, 2014</conf-date>
        <conf-loc>Armenia, Colombia</conf-loc>
        <pub-id pub-id-type="doi">10.1109/STSIVA.2014.7010172</pub-id></nlm-citation>
      </ref>
      <ref id="ref118">
        <label>118</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Travaglini</surname>
            <given-names>TA</given-names>
          </name>
          <name name-style="western">
            <surname>Swaney</surname>
            <given-names>PJ</given-names>
          </name>
          <name name-style="western">
            <surname>Weaver</surname>
            <given-names>KD</given-names>
          </name>
          <name name-style="western">
            <surname>Webster</surname>
            <given-names>RJ</given-names>
          </name>
        </person-group>
        <article-title>Initial experiments with the leap motion as a user interface in robotic endonasal surgery</article-title>
        <source>Robot Mechatron (2015)</source>  
        <year>2016</year>  
        <volume>37</volume>  
        <fpage>171</fpage>  
        <lpage>9</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/26752501"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1007/978-3-319-22368-1_17</pub-id>
        <pub-id pub-id-type="medline">26752501</pub-id>
        <pub-id pub-id-type="pmcid">PMC4703412</pub-id></nlm-citation>
      </ref>
      <ref id="ref119">
        <label>119</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Despinoy</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Zemiti</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Forestier</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Sánchez</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Jannin</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Poignet</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <article-title>Evaluation of contactless human-machine interface for robotic surgical training</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2018</year>  
        <month>01</month>  
        <volume>13</volume>  
        <issue>1</issue>  
        <fpage>13</fpage>  
        <lpage>24</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11548-017-1666-6</pub-id>
        <pub-id pub-id-type="medline">28914409</pub-id>
        <pub-id pub-id-type="pii">10.1007/s11548-017-1666-6</pub-id></nlm-citation>
      </ref>
      <ref id="ref120">
        <label>120</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Lahanas</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Loukas</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Georgiou</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Lababidi</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Al-Jaroudi</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>Virtual reality-based assessment of basic laparoscopic skills using the Leap Motion controller</article-title>
        <source>Surg Endosc</source>  
        <year>2017</year>  
        <month>12</month>  
        <volume>31</volume>  
        <issue>12</issue>  
        <fpage>5012</fpage>  
        <lpage>23</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00464-017-5503-3</pub-id>
        <pub-id pub-id-type="medline">28466361</pub-id>
        <pub-id pub-id-type="pii">10.1007/s00464-017-5503-3</pub-id></nlm-citation>
      </ref>
      <ref id="ref121">
        <label>121</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kowalewski</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Hendrie</surname>
            <given-names>JD</given-names>
          </name>
          <name name-style="western">
            <surname>Schmidt</surname>
            <given-names>MW</given-names>
          </name>
          <name name-style="western">
            <surname>Garrow</surname>
            <given-names>CR</given-names>
          </name>
          <name name-style="western">
            <surname>Bruckner</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Proctor</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Paul</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Adigüzel</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Bodenstedt</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Erben</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Kenngott</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Erben</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Speidel</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Müller-Stich</surname>
            <given-names>BP</given-names>
          </name>
          <name name-style="western">
            <surname>Nickel</surname>
            <given-names>F</given-names>
          </name>
        </person-group>
        <article-title>Development and validation of a sensor- and expert model-based training system for laparoscopic surgery: the iSurgeon</article-title>
        <source>Surg Endosc</source>  
        <year>2017</year>  
        <month>05</month>  
        <volume>31</volume>  
        <issue>5</issue>  
        <fpage>2155</fpage>  
        <lpage>65</lpage>  
        <pub-id pub-id-type="doi">10.1007/s00464-016-5213-2</pub-id>
        <pub-id pub-id-type="medline">27604368</pub-id>
        <pub-id pub-id-type="pii">10.1007/s00464-016-5213-2</pub-id></nlm-citation>
      </ref>
      <ref id="ref122">
        <label>122</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Pérez</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Sossa</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Martínez</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Lorias</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>Video-based tracking of laparoscopic instruments using an orthogonal webcams system</article-title>
        <source>Acad Sci Eng Technol Int J</source>  
        <year>2013</year>  
        <volume>7</volume>  
        <issue>8</issue>  
        <fpage>440</fpage>  
        <lpage>3</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://waset.org/publications/16063/video-based-tracking-of-laparoscopic-instruments-using-an-orthogonal-webcams-system"/>
        </comment>  
        <pub-id pub-id-type="doi">10.5281/zenodo.1086517</pub-id></nlm-citation>
      </ref>
      <ref id="ref123">
        <label>123</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Oropesa</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>de Jong</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Sánchez-González</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Dankelman</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Gómez</surname>
            <given-names>E</given-names>
          </name>
        </person-group>
        <article-title>Feasibility of tracking laparoscopic instruments in a box trainer using a Leap Motion Controller</article-title>
        <source>Measurement</source>  
        <year>2016</year>  
        <month>02</month>  
        <volume>80</volume>  
        <fpage>115</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.measurement.2015.11.018"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.measurement.2015.11.018</pub-id></nlm-citation>
      </ref>
      <ref id="ref124">
        <label>124</label>
        <nlm-citation citation-type="web">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Beck</surname>
            <given-names>P</given-names>
          </name>
        </person-group>
        <source>Free Patents Online</source>  
        <year>2016</year>  
        <access-date>2019-04-02</access-date>
        <comment>Accurate Three-dimensional Instrument Positioning 
        <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.freepatentsonline.com/20160354152.pdf">http://www.freepatentsonline.com/20160354152.pdf</ext-link>
        <ext-link ext-link-type="webcite" xlink:href="77KST95Tx"/></comment> </nlm-citation>
      </ref>
      <ref id="ref125">
        <label>125</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Owlia</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Khabbazan</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Mirbagheri</surname>
            <given-names>MM</given-names>
          </name>
          <name name-style="western">
            <surname>Mirbagheri</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Real-time tracking of laparoscopic instruments using kinect for training in virtual reality</article-title>
        <source>Conf Proc IEEE Eng Med Biol Soc</source>  
        <year>2016</year>  
        <month>12</month>  
        <volume>2016</volume>  
        <fpage>3945</fpage>  
        <lpage>8</lpage>  
        <pub-id pub-id-type="doi">10.1109/EMBC.2016.7591590</pub-id>
        <pub-id pub-id-type="medline">28269148</pub-id></nlm-citation>
      </ref>
      <ref id="ref126">
        <label>126</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Partridge</surname>
            <given-names>RW</given-names>
          </name>
          <name name-style="western">
            <surname>Brown</surname>
            <given-names>FS</given-names>
          </name>
          <name name-style="western">
            <surname>Brennan</surname>
            <given-names>PM</given-names>
          </name>
          <name name-style="western">
            <surname>Hennessey</surname>
            <given-names>IA</given-names>
          </name>
          <name name-style="western">
            <surname>Hughes</surname>
            <given-names>MA</given-names>
          </name>
        </person-group>
        <article-title>The LEAPTM gesture interface device and take-home laparoscopic simulators: a study of construct and concurrent validity</article-title>
        <source>Surg Innov</source>  
        <year>2016</year>  
        <month>02</month>  
        <volume>23</volume>  
        <issue>1</issue>  
        <fpage>70</fpage>  
        <lpage>7</lpage>  
        <pub-id pub-id-type="doi">10.1177/1553350615594734</pub-id>
        <pub-id pub-id-type="medline">26178693</pub-id>
        <pub-id pub-id-type="pii">1553350615594734</pub-id></nlm-citation>
      </ref>
      <ref id="ref127">
        <label>127</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Sun</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Byrns</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Cheng</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Zheng</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Basu</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Smart sensor-based motion detection system for hand movement training in open surgery</article-title>
        <source>J Med Syst</source>  
        <year>2017</year>  
        <month>02</month>  
        <volume>41</volume>  
        <issue>2</issue>  
        <fpage>24</fpage>  
        <pub-id pub-id-type="doi">10.1007/s10916-016-0665-4</pub-id>
        <pub-id pub-id-type="medline">28000118</pub-id>
        <pub-id pub-id-type="pii">10.1007/s10916-016-0665-4</pub-id></nlm-citation>
      </ref>
      <ref id="ref128">
        <label>128</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hartmann</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Schlaefer</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Feasibility of touch-less control of operating room lights</article-title>
        <source>Int J Comput Assist Radiol Surg</source>  
        <year>2013</year>  
        <month>03</month>  
        <volume>8</volume>  
        <issue>2</issue>  
        <fpage>259</fpage>  
        <lpage>68</lpage>  
        <pub-id pub-id-type="doi">10.1007/s11548-012-0778-2</pub-id>
        <pub-id pub-id-type="medline">22806717</pub-id></nlm-citation>
      </ref>
      <ref id="ref129">
        <label>129</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Mauser</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Burgert</surname>
            <given-names>O</given-names>
          </name>
        </person-group>
        <article-title>Touch-free, gesture-based control of medical devices and software based on the leap motion controller</article-title>
        <source>Stud Health Technol Inform</source>  
        <year>2014</year>  
        <volume>196</volume>  
        <fpage>265</fpage>  
        <lpage>70</lpage>  
        <pub-id pub-id-type="medline">24732520</pub-id></nlm-citation>
      </ref>
      <ref id="ref130">
        <label>130</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Schröder</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Loftfield</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Langmann</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Frank</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Reithmeier</surname>
            <given-names>E</given-names>
          </name>
        </person-group>
        <article-title>Contactless operating table control based on 3D image processing</article-title>
        <source>Conf Proc IEEE Eng Med Biol Soc</source>  
        <year>2014</year>  
        <volume>2014</volume>  
        <fpage>388</fpage>  
        <lpage>92</lpage>  
        <pub-id pub-id-type="doi">10.1109/EMBC.2014.6943610</pub-id>
        <pub-id pub-id-type="medline">25569978</pub-id></nlm-citation>
      </ref>
      <ref id="ref131">
        <label>131</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Jacob</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Wachs</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Context-based hand gesture recognition for the operating room</article-title>
        <source>Pattern Recognit Lett</source>  
        <year>2014</year>  
        <month>01</month>  
        <volume>36</volume>  
        <fpage>196</fpage>  
        <lpage>203</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.patrec.2013.05.024"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.patrec.2013.05.024</pub-id></nlm-citation>
      </ref>
      <ref id="ref132">
        <label>132</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Sweet</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Kowalewski</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Oppenheimer</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Weghorst</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Satava</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Face, content and construct validity of the University of Washington virtual reality transurethral prostate resection trainer</article-title>
        <source>J Urol</source>  
        <year>2004</year>  
        <month>11</month>  
        <volume>172</volume>  
        <issue>5 Pt 1</issue>  
        <fpage>1953</fpage>  
        <lpage>7</lpage>  
        <pub-id pub-id-type="doi">10.1097/01.ju.0000141298.06350.4c</pub-id>
        <pub-id pub-id-type="medline">15540764</pub-id>
        <pub-id pub-id-type="pii">S0022-5347(05)60903-2</pub-id></nlm-citation>
      </ref>
      <ref id="ref133">
        <label>133</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Higgins</surname>
            <given-names>JP</given-names>
          </name>
          <name name-style="western">
            <surname>Altman</surname>
            <given-names>DG</given-names>
          </name>
          <name name-style="western">
            <surname>Gøtzsche</surname>
            <given-names>PC</given-names>
          </name>
          <name name-style="western">
            <surname>Jüni</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Moher</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Oxman</surname>
            <given-names>AD</given-names>
          </name>
          <name name-style="western">
            <surname>Savovic</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Schulz</surname>
            <given-names>KF</given-names>
          </name>
          <name name-style="western">
            <surname>Weeks</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Sterne</surname>
            <given-names>JA</given-names>
          </name>
          <collab>Cochrane Bias Methods Group</collab>
          <collab>Cochrane Statistical Methods Group</collab>
        </person-group>
        <article-title>The Cochrane Collaboration's tool for assessing risk of bias in randomised trials</article-title>
        <source>Br Med J</source>  
        <year>2011</year>  
        <month>10</month>  
        <day>18</day>  
        <volume>343</volume>  
        <fpage>d5928</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/22008217"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1136/bmj.d5928</pub-id>
        <pub-id pub-id-type="medline">22008217</pub-id>
        <pub-id pub-id-type="pmcid">PMC3196245</pub-id></nlm-citation>
      </ref>
      <ref id="ref134">
        <label>134</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Weichert</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Bachmann</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Rudak</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Fisseler</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>Analysis of the accuracy and robustness of the Leap Motion Controller</article-title>
        <source>Sensors (Basel)</source>  
        <year>2013</year>  
        <month>01</month>  
        <volume>13</volume>  
        <issue>5</issue>  
        <fpage>6380</fpage>  
        <lpage>93</lpage>  
        <pub-id pub-id-type="medline">23673687</pub-id></nlm-citation>
      </ref>
      <ref id="ref135">
        <label>135</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Bachmann</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Weichert</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Rinkenauer</surname>
            <given-names>G</given-names>
          </name>
        </person-group>
        <article-title>Evaluation of the leap motion controller as a new contact-free pointing device</article-title>
        <source>Sensors (Basel)</source>  
        <year>2014</year>  
        <month>12</month>  
        <day>24</day>  
        <volume>15</volume>  
        <issue>1</issue>  
        <fpage>214</fpage>  
        <lpage>33</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.mdpi.com/resolver?pii=s150100214"/>
        </comment>  
        <pub-id pub-id-type="doi">10.3390/s150100214</pub-id>
        <pub-id pub-id-type="medline">25609043</pub-id>
        <pub-id pub-id-type="pii">s150100214</pub-id>
        <pub-id pub-id-type="pmcid">PMC4327015</pub-id></nlm-citation>
      </ref>
      <ref id="ref136">
        <label>136</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Guna</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Jakus</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Pogačnik</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Tomažič</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Sodnik</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>An analysis of the precision and reliability of the leap motion sensor and its suitability for static and dynamic tracking</article-title>
        <source>Sensors (Basel)</source>  
        <year>2014</year>  
        <month>02</month>  
        <day>21</day>  
        <volume>14</volume>  
        <issue>2</issue>  
        <fpage>3702</fpage>  
        <lpage>20</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.mdpi.com/resolver?pii=s140203702"/>
        </comment>  
        <pub-id pub-id-type="doi">10.3390/s140203702</pub-id>
        <pub-id pub-id-type="medline">24566635</pub-id>
        <pub-id pub-id-type="pii">s140203702</pub-id>
        <pub-id pub-id-type="pmcid">PMC3958287</pub-id></nlm-citation>
      </ref>
      <ref id="ref137">
        <label>137</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Khoshelham</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Elberink</surname>
            <given-names>SO</given-names>
          </name>
        </person-group>
        <article-title>Accuracy and resolution of Kinect depth data for indoor mapping applications</article-title>
        <source>Sensors (Basel)</source>  
        <year>2012</year>  
        <volume>12</volume>  
        <issue>2</issue>  
        <fpage>1437</fpage>  
        <lpage>54</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.mdpi.com/resolver?pii=s120201437"/>
        </comment>  
        <pub-id pub-id-type="doi">10.3390/s120201437</pub-id>
        <pub-id pub-id-type="medline">22438718</pub-id>
        <pub-id pub-id-type="pii">s120201437</pub-id>
        <pub-id pub-id-type="pmcid">PMC3304120</pub-id></nlm-citation>
      </ref>
      <ref id="ref138">
        <label>138</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Mendez</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Hansen</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Grabow</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Smedegaard</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Skogberg</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Uth</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Bruhn</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Geng</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Kamavuako</surname>
            <given-names>EN</given-names>
          </name>
        </person-group>
        <article-title>Evaluation of the Myo armband for the classification of hand motions</article-title>
        <source>IEEE Int Conf Rehabil Robot</source>  
        <year>2017</year>  
        <month>12</month>  
        <volume>2017</volume>  
        <fpage>1211</fpage>  
        <lpage>4</lpage>  
        <pub-id pub-id-type="doi">10.1109/ICORR.2017.8009414</pub-id>
        <pub-id pub-id-type="medline">28813986</pub-id></nlm-citation>
      </ref>
      <ref id="ref139">
        <label>139</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Li</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Ren</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Huang</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Zhu</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Hu</surname>
            <given-names>H</given-names>
          </name>
        </person-group>
        <article-title>PCA and deep learning based myoelectric grasping control of a prosthetic hand</article-title>
        <source>Biomed Eng Online</source>  
        <year>2018</year>  
        <month>08</month>  
        <day>6</day>  
        <volume>17</volume>  
        <issue>1</issue>  
        <fpage>107</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://biomedical-engineering-online.biomedcentral.com/articles/10.1186/s12938-018-0539-8"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1186/s12938-018-0539-8</pub-id>
        <pub-id pub-id-type="medline">30081927</pub-id>
        <pub-id pub-id-type="pii">10.1186/s12938-018-0539-8</pub-id>
        <pub-id pub-id-type="pmcid">PMC6080221</pub-id></nlm-citation>
      </ref>
      <ref id="ref140">
        <label>140</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ur Rehman</surname>
            <given-names>MZ</given-names>
          </name>
          <name name-style="western">
            <surname>Waris</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Gilani</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Jochumsen</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Niazi</surname>
            <given-names>IK</given-names>
          </name>
          <name name-style="western">
            <surname>Jamil</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Farina</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Kamavuako</surname>
            <given-names>EN</given-names>
          </name>
        </person-group>
        <article-title>Multiday EMG-based classification of hand motions with deep learning techniques</article-title>
        <source>Sensors (Basel)</source>  
        <year>2018</year>  
        <month>08</month>  
        <day>1</day>  
        <volume>18</volume>  
        <issue>8</issue>  
        <fpage>1</fpage>  
        <lpage>16</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://www.mdpi.com/resolver?pii=s18082497"/>
        </comment>  
        <pub-id pub-id-type="doi">10.3390/s18082497</pub-id>
        <pub-id pub-id-type="medline">30071617</pub-id>
        <pub-id pub-id-type="pii">s18082497</pub-id>
        <pub-id pub-id-type="pmcid">PMC6111443</pub-id></nlm-citation>
      </ref>
      <ref id="ref141">
        <label>141</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Sánchez-Margallo</surname>
            <given-names>JA</given-names>
          </name>
          <name name-style="western">
            <surname>Sánchez-Margallo</surname>
            <given-names>FM</given-names>
          </name>
          <name name-style="western">
            <surname>Pagador Carrasco</surname>
            <given-names>JB</given-names>
          </name>
          <name name-style="western">
            <surname>Oropesa García</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Gómez Aguilera</surname>
            <given-names>EJ</given-names>
          </name>
          <name name-style="western">
            <surname>Moreno del Pozo</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Usefulness of an optical tracking system in laparoscopic surgery for motor skills assessment</article-title>
        <source>Cir Esp</source>  
        <year>2014</year>  
        <volume>92</volume>  
        <issue>6</issue>  
        <fpage>421</fpage>  
        <lpage>8</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.ciresp.2013.01.006</pub-id>
        <pub-id pub-id-type="medline">23668944</pub-id>
        <pub-id pub-id-type="pii">S0009-739X(13)00096-1</pub-id></nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
