<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="letter" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v27i1e75327</article-id>
      <article-id pub-id-type="pmid">41218198</article-id>
      <article-id pub-id-type="doi">10.2196/75327</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Research Letter</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Research Letter</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Wearable Augmented Reality for Nystagmus Examination in Patients With Vertigo: Randomized Crossover Usability Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Coristine</surname>
            <given-names>Andrew</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Wang</surname>
            <given-names>Ching-Fu</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Hungbo</surname>
            <given-names>Akonasu</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author">
          <name name-style="western">
            <surname>Wu</surname>
            <given-names>Ching-Nung</given-names>
          </name>
          <degrees>MD, PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-9397-0986</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>Ming-Che</given-names>
          </name>
          <degrees>Prof Dr</degrees>
          <xref rid="aff4" ref-type="aff">4</xref>
          <address>
            <institution>Department of Electronic Engineering</institution>
            <institution>Southern Taiwan University of Science and Technology</institution>
            <addr-line>1 Nantai St</addr-line>
            <addr-line>Yungkang District</addr-line>
            <addr-line>Tainan, 710301</addr-line>
            <country>Taiwan</country>
            <phone>886 913661700</phone>
            <email>jerryhata@stust.edu.tw</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-4109-4563</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Chien</surname>
            <given-names>Chien-Yan</given-names>
          </name>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0008-5193-5101</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Chang</surname>
            <given-names>Hsiang-Han</given-names>
          </name>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0007-0938-6674</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author">
          <name name-style="western">
            <surname>Luo</surname>
            <given-names>Sheng-Dean</given-names>
          </name>
          <degrees>MD, PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <xref rid="aff3" ref-type="aff">3</xref>
          <xref rid="aff5" ref-type="aff">5</xref>
          <xref rid="aff6" ref-type="aff">6</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-4095-5541</ext-link>
        </contrib>
        <contrib id="contrib6" contrib-type="author">
          <name name-style="western">
            <surname>Hwang</surname>
            <given-names>Chung-Feng</given-names>
          </name>
          <degrees>MD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-4126-5737</ext-link>
        </contrib>
        <contrib id="contrib7" contrib-type="author">
          <name name-style="western">
            <surname>Chang</surname>
            <given-names>Wan-Jung</given-names>
          </name>
          <degrees>Prof Dr</degrees>
          <xref rid="aff7" ref-type="aff">7</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-7478-7315</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Otolaryngology</institution>
        <institution>Kaohsiung Chang Gung Memorial Hospital</institution>
        <addr-line>Kaohsiung</addr-line>
        <country>Taiwan</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>School of Traditional Chinese Medicine</institution>
        <institution>College of Medicine</institution>
        <institution>Chang Gung University</institution>
        <addr-line>Taoyuan</addr-line>
        <country>Taiwan</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Otolaryngology</institution>
        <institution>Kaohsiung Municipal Ta-Tung Hospital</institution>
        <addr-line>Kaohsiung</addr-line>
        <country>Taiwan</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>Department of Electronic Engineering</institution>
        <institution>Southern Taiwan University of Science and Technology</institution>
        <addr-line>Tainan</addr-line>
        <country>Taiwan</country>
      </aff>
      <aff id="aff5">
        <label>5</label>
        <institution>Graduate Institute of Clinical Medical Sciences</institution>
        <institution>College of Medicine</institution>
        <institution>Chang Gung University</institution>
        <addr-line>Taoyuan</addr-line>
        <country>Taiwan</country>
      </aff>
      <aff id="aff6">
        <label>6</label>
        <institution>School of Medicine</institution>
        <institution>College of Medicine</institution>
        <institution>National Sun Yat-sen University</institution>
        <addr-line>Kaohsiung</addr-line>
        <country>Taiwan</country>
      </aff>
      <aff id="aff7">
        <label>7</label>
        <institution>Department of Electronic Engineering</institution>
        <institution>National Kaohsiung University of Science and Technology</institution>
        <addr-line>Kaohsiung</addr-line>
        <country>Taiwan</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Ming-Che Chen <email>jerryhata@stust.edu.tw</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2025</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>11</day>
        <month>11</month>
        <year>2025</year>
      </pub-date>
      <volume>27</volume>
      <elocation-id>e75327</elocation-id>
      <history>
        <date date-type="received">
          <day>1</day>
          <month>4</month>
          <year>2025</year>
        </date>
        <date date-type="rev-request">
          <day>11</day>
          <month>6</month>
          <year>2025</year>
        </date>
        <date date-type="rev-recd">
          <day>28</day>
          <month>6</month>
          <year>2025</year>
        </date>
        <date date-type="accepted">
          <day>8</day>
          <month>9</month>
          <year>2025</year>
        </date>
      </history>
      <copyright-statement>©Ching-Nung Wu, Ming-Che Chen, Chien-Yan Chien, Hsiang-Han Chang, Sheng-Dean Luo, Chung-Feng Hwang, Wan-Jung Chang. Originally published in the Journal of Medical Internet Research (https://www.jmir.org), 11.11.2025.</copyright-statement>
      <copyright-year>2025</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research (ISSN 1438-8871), is properly cited. The complete bibliographic information, a link to the original publication on https://www.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://www.jmir.org/2025/1/e75327" xlink:type="simple"/>
      <abstract>
        <p>We demonstrate the feasibility of a wearable, augmented reality–based nystagmus examination system, showing its preliminary diagnostic agreement with conventional video-oculography and its potential for portable vestibular assessment in patients with vertigo.</p>
      </abstract>
      <kwd-group>
        <kwd>nystagmus examination</kwd>
        <kwd>augmented reality</kwd>
        <kwd>oculomotor test</kwd>
        <kwd>video-oculography</kwd>
        <kwd>vertigo diagnostics</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Vertigo commonly arises from benign vestibular dysfunction but may also have a central cause such as stroke (in approximately 10% of cases) [<xref ref-type="bibr" rid="ref1">1</xref>]. Nystagmus analysis is key to differentiating these disorders [<xref ref-type="bibr" rid="ref2">2</xref>], yet conventional video-oculography (VOG) requires specialized laboratories and personnel, limiting access [<xref ref-type="bibr" rid="ref3">3</xref>]. We developed a wearable augmented reality (AR)–based system delivering standardized oculomotor stimuli with real-time eye tracking. This study reports its design and preliminary validation.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Overview</title>
        <p>This feasibility study evaluated the usability, accuracy, and tolerability of a wearable AR-based nystagmus system in a hospital clinical setting. The system integrated hardware and software to simulate conventional oculomotor testing with real-time eye tracking and automated data processing.</p>
        <p>For comparison, the VNG Ulmer system (Synapsys; <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>) performs 6 standardized tests of 3 stimulus types: (1) gaze-evoked nystagmus at plus or minus 15° (60 s total; horizontal/vertical axes), (2) saccades with fixed displacements every 4 seconds over 30 seconds (8-9 trials; horizontal/vertical axes), and (3) smooth pursuit at 0.25 Hz for 30 seconds (7-8 cycles; horizontal/vertical axes) [<xref ref-type="bibr" rid="ref4">4</xref>].</p>
        <p>The wearable AR system (<xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref>) comprised J7EF Gaze smart glasses, an Android-based portable device (APD), and a back-end platform. The structural components included dual Si-OLED displays, a 30 Hz infrared eye-tracking sensor, and an optional magnetic light shield to replicate Frenzel goggles (<xref ref-type="supplementary-material" rid="app3">Multimedia Appendix 3</xref>). The APD, connected via USB Type-C, ran Unity 3D software to generate a virtual 1-meter display.</p>
        <p>In-house software delivered 6 standardized stimuli (fixation, saccades, and smooth pursuit in the horizontal and vertical axes), consistent with the VOG protocol. Real-time gaze data were transmitted via Wi-Fi for secure storage and automated analysis. This setup reproduced conventional vestibular assessments while enabling portable, automated nystagmus evaluation (<xref rid="figure1" ref-type="fig">Figure 1</xref>).</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>This figure illustrates the architecture of the wearable augmented reality–based nystagmus examination system. The J7EF Gaze smart glasses incorporate near-eye displays and infrared eye-tracking sensors to present standardized visual stimuli and capture real-time gaze positions. A virtual screen simulates a 1-meter viewing distance, displaying moving light dots for oculomotor assessment. Data are processed on an Android portable device and transmitted via Wi-Fi to a back-end information platform for visualization and further analysis.</p>
          </caption>
          <graphic xlink:href="jmir_v27i1e75327_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Participants and Study Procedures</title>
        <p>Nine patients with vertigo were enrolled (October 2024 to January 2025); 8 completed both AR and VOG examinations in a randomized crossover design with a 30-minute washout (<xref rid="figure2" ref-type="fig">Figure 2</xref>). After each examination, participants rated discomfort using a visual analog scale (VAS; range 0-10). All waveform outputs were pooled and blindly interpreted by a board-certified otologist. The primary outcome was diagnostic concordance, quantified as percentage agreement [<xref ref-type="bibr" rid="ref5">5</xref>]. Secondary outcomes were VAS scores and diagnostic performance metrics (accuracy, sensitivity, specificity, positive predictive value [PPV], and negative predictive value [NPV]), each reported with 95% CIs (Clopper-Pearson exact method).</p>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>This figure illustrates the workflow of the augmented reality–based oculomotor examination, including software-generated visual stimuli and real-time eye movement tracking. The light dot, white stripe, and gaze trajectories are displayed on the back-end information platform, enabling comparative analysis of expected vs actual eye movement responses.</p>
          </caption>
          <graphic xlink:href="jmir_v27i1e75327_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Ethical Considerations</title>
        <p>This study was approved by the institutional review board of Kaohsiung Chang Gung Memorial Hospital (202202194B0C501). All participants provided written informed consent. Data were anonymized, encrypted, and securely stored. No compensation was given.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <p>One participant with prior cataract surgery could not calibrate the AR glasses, leaving 8 valid cases (mean age 60.4, SD 8.9 years; range 46-72). No significant discomfort or adverse effects were reported (<xref ref-type="supplementary-material" rid="app4">Multimedia Appendix 4</xref>).</p>
      <p>A total of 48 oculomotor data points were analyzed (saccades, pursuit, and gaze fixation). Agreement rates between AR and VOG ranged from 62.5% to 87.5% (<xref ref-type="supplementary-material" rid="app5">Multimedia Appendix 5</xref>). Overall diagnostic accuracy was 77.1%, with sensitivity 81.8% (9/11; 95% CI 48.2%-97.7%), specificity 75.7% (28/37; 95% CI 58.8%-88.2%), PPV 50.0% (9/18; 95% CI 26.0%-73.9%), and NPV 93.3% (28/30; 95% CI 77.9%-99.2%) (<xref ref-type="supplementary-material" rid="app6">Multimedia Appendix 6</xref>). For central pathology, sensitivity reached 83.3% and specificity 100%. VAS scores did not differ significantly between AR and VOG, confirming tolerability.</p>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <p>This study demonstrates the feasibility of wearable AR glasses for nystagmus examination, showing diagnostic consistency comparable to VOG, particularly in ruling out central abnormalities. Results align with recent AR-based HINTS (head impulse, nystagmus, test of skew) assessments using head-mounted devices [<xref ref-type="bibr" rid="ref6">6</xref>] and smartphone nystagmus apps with approximately 82% sensitivity [<xref ref-type="bibr" rid="ref7">7</xref>].</p>
      <p>Although enrollment spanned 4 months, only 8 participants completed paired assessments due to the 2-to-3-month delay for conventional VOG. This explains the small sample and illustrates the clinical bottleneck the AR system seeks to address. Patient tolerance was favorable, with no significant discomfort, consistent with prior AR studies [<xref ref-type="bibr" rid="ref8">8</xref>]. One participant with cataract surgery could not be calibrated, likely due to altered ocular optics, underscoring the need for adaptive algorithms [<xref ref-type="bibr" rid="ref9">9</xref>]. Unlike stationary VOG laboratories, wearable AR systems are portable and deployable in clinics, emergency care, or telemedicine, enabling point-of-care testing without specialized infrastructure [<xref ref-type="bibr" rid="ref10">10</xref>]. With automated guidance, real-time tracking, and potential artificial intelligence integration, they may reduce reliance on experts and support decision-making.</p>
      <p>Limitations include the small sample, yielding wide CIs for sensitivity (81.8%; 48.2%-97.7%) and specificity (75.7%; 58.8%-88.2%), reducing precision. The moderate PPV (50.0%; 26.0%-73.9%) highlights the need for improved processing. Diagnostic concordance was measured by percent agreement, which does not adjust for chance; future studies should apply Cohen κ and multiple raters. Finally, as a single-center pilot with one clinician, multicenter validation is required to confirm generalizability.</p>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>Full conventional video-oculography protocol, including test parameters and classification criteria.</p>
        <media xlink:href="jmir_v27i1e75327_app1.docx" xlink:title="DOCX File , 22 KB"/>
      </supplementary-material>
      <supplementary-material id="app2">
        <label>Multimedia Appendix 2</label>
        <p>Detailed system design and hardware/software specifications.</p>
        <media xlink:href="jmir_v27i1e75327_app2.docx" xlink:title="DOCX File , 23 KB"/>
      </supplementary-material>
      <supplementary-material id="app3">
        <label>Multimedia Appendix 3</label>
        <p>Structural components of the J7EF Gaze smart glasses.</p>
        <media xlink:href="jmir_v27i1e75327_app3.docx" xlink:title="DOCX File , 140 KB"/>
      </supplementary-material>
      <supplementary-material id="app4">
        <label>Multimedia Appendix 4</label>
        <p>Patient characteristics and oculomotor examination results.</p>
        <media xlink:href="jmir_v27i1e75327_app4.docx" xlink:title="DOCX File , 23 KB"/>
      </supplementary-material>
      <supplementary-material id="app5">
        <label>Multimedia Appendix 5</label>
        <p>Heatmap of agreement rates between augmented reality and video-oculography signals.</p>
        <media xlink:href="jmir_v27i1e75327_app5.docx" xlink:title="DOCX File , 4694 KB"/>
      </supplementary-material>
      <supplementary-material id="app6">
        <label>Multimedia Appendix 6</label>
        <p>Confusion matrix of augmented reality–based vs video-oculography–based classifications of suspected central vestibular pathology.</p>
        <media xlink:href="jmir_v27i1e75327_app6.docx" xlink:title="DOCX File , 20 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">APD</term>
          <def>
            <p>Android-based portable device</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">AR</term>
          <def>
            <p>augmented reality</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">HINTS</term>
          <def>
            <p>head impulse, nystagmus, test of skew</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">NPV</term>
          <def>
            <p>negative predictive value</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">PPV</term>
          <def>
            <p>positive predictive value</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">VAS</term>
          <def>
            <p>visual analog scale</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">VOG</term>
          <def>
            <p>video-oculography</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This study was supported by a grant from the National Science and Technology Council, Taiwan (112-2314-B-182A-094-MY3). However, the funders had no role in the study design, data collection, data analysis, decision to publish, or manuscript preparation. We express our appreciation of the Biostatistics Center at Kaohsiung Chang Gung Memorial Hospital for helping with the study design and statistics work. Portions of the manuscript were edited for grammar and style using OpenAI’s ChatGPT (GPT-5); all scientific content and interpretation were reviewed and approved by the authors.</p>
    </ack>
    <notes>
      <sec>
        <title>Data Availability</title>
        <p>All deidentified data generated or analyzed during this study are included in this published article and its supplementary files.</p>
      </sec>
    </notes>
    <fn-group>
      <fn fn-type="con">
        <p>CNW contributed to conceptualization, project administration, formal analysis, writing—original draft, and data curation. CYC contributed to methodology and technical support. HHC contributed to data curation and technical support. SDL contributed to investigation. CFH contributed to validation. WJC contributed to conceptualization. MCC contributed to conceptualization, resources, supervision, and writing—review and editing.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lui</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Foris</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Tadi</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <source>Central Vertigo</source>
          <year>2025</year>
          <publisher-loc>Treasure Island, FL</publisher-loc>
          <publisher-name>StatPearls Publishing</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Saha</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Vertigo related to central nervous system disorders</article-title>
          <source>Continuum (Minneap Minn)</source>
          <year>2021</year>
          <month>04</month>
          <day>01</day>
          <volume>27</volume>
          <issue>2</issue>
          <fpage>447</fpage>
          <lpage>467</lpage>
          <pub-id pub-id-type="doi">10.1212/CON.0000000000000933</pub-id>
          <pub-id pub-id-type="medline">34351114</pub-id>
          <pub-id pub-id-type="pii">00132979-202104000-00010</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Winnick</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>Chih-Chung</given-names>
            </name>
            <name name-style="western">
              <surname>Chang</surname>
              <given-names>Tzu-Pu</given-names>
            </name>
            <name name-style="western">
              <surname>Kuo</surname>
              <given-names>Yu-Hung</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Ching-Fu</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>Chin-Hsun</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>Chun-Chen</given-names>
            </name>
          </person-group>
          <article-title>Automated nystagmus detection: accuracy of slow-phase and quick-phase algorithms to determine the presence of nystagmus</article-title>
          <source>J Neurol Sci</source>
          <year>2022</year>
          <month>11</month>
          <day>15</day>
          <volume>442</volume>
          <fpage>120392</fpage>
          <pub-id pub-id-type="doi">10.1016/j.jns.2022.120392</pub-id>
          <pub-id pub-id-type="medline">36058057</pub-id>
          <pub-id pub-id-type="pii">S0022-510X(22)00254-4</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Chiang</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Ho</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Cheng</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Lin</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>Y</given-names>
            </name>
          </person-group>
          <article-title>Applicability of oculomotor tests for predicting central vestibular disorder using principal component analysis</article-title>
          <source>J Pers Med</source>
          <year>2022</year>
          <month>02</month>
          <day>02</day>
          <volume>12</volume>
          <issue>2</issue>
          <fpage>203</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=jpm12020203"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/jpm12020203</pub-id>
          <pub-id pub-id-type="medline">35207691</pub-id>
          <pub-id pub-id-type="pii">jpm12020203</pub-id>
          <pub-id pub-id-type="pmcid">PMC8880333</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>McHugh</surname>
              <given-names>ML</given-names>
            </name>
          </person-group>
          <article-title>Interrater reliability: the kappa statistic</article-title>
          <source>Biochem Med (Zagreb)</source>
          <year>2012</year>
          <volume>22</volume>
          <issue>3</issue>
          <fpage>276</fpage>
          <lpage>82</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/23092060"/>
          </comment>
          <pub-id pub-id-type="medline">23092060</pub-id>
          <pub-id pub-id-type="pmcid">PMC3900052</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sadok</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Luijten</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bahnsen</surname>
              <given-names>FH</given-names>
            </name>
            <name name-style="western">
              <surname>Gsaxner</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Peters</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Eichler</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Rombach</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Lang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Khattab</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Kleesiek</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Holle</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Meyer</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Egger</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Performing the HINTS-exam using a mixed-reality head-mounted display in patients with acute vestibular syndrome: a feasibility study</article-title>
          <source>Front Neurol</source>
          <year>2025</year>
          <volume>16</volume>
          <fpage>1576959</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.3389/fneur.2025.1576959"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fneur.2025.1576959</pub-id>
          <pub-id pub-id-type="medline">40438569</pub-id>
          <pub-id pub-id-type="pmcid">PMC12116368</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>van Bonn</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Behrendt</surname>
              <given-names>SP</given-names>
            </name>
            <name name-style="western">
              <surname>Pawar</surname>
              <given-names>BL</given-names>
            </name>
            <name name-style="western">
              <surname>Schraven</surname>
              <given-names>SP</given-names>
            </name>
            <name name-style="western">
              <surname>Mlynski</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Schuldt</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Smartphone-based nystagmus diagnostics: development of an innovative app for the targeted detection of vertigo</article-title>
          <source>Eur Arch Otorhinolaryngol</source>
          <year>2022</year>
          <month>12</month>
          <volume>279</volume>
          <issue>12</issue>
          <fpage>5565</fpage>
          <lpage>5571</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/35451613"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s00405-022-07385-9</pub-id>
          <pub-id pub-id-type="medline">35451613</pub-id>
          <pub-id pub-id-type="pii">10.1007/s00405-022-07385-9</pub-id>
          <pub-id pub-id-type="pmcid">PMC9023692</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Che</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Chang</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Qu</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Di</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Su</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>How different text display patterns affect cybersickness in augmented reality</article-title>
          <source>Sci Rep</source>
          <year>2024</year>
          <month>05</month>
          <day>22</day>
          <volume>14</volume>
          <issue>1</issue>
          <fpage>11693</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1038/s41598-024-62338-y"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-024-62338-y</pub-id>
          <pub-id pub-id-type="medline">38778168</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-024-62338-y</pub-id>
          <pub-id pub-id-type="pmcid">PMC11111777</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Krösl</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Elvezio</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Luidolt</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Hürbe</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Karst</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Feiner</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wimmer</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>CatARact: simulating cataracts in augmented reality</article-title>
          <source>IEEE International Symposium on Mixed and Augmented Reality</source>
          <year>2020</year>
          <conf-name>ISMAR</conf-name>
          <conf-date>November 9-13, 2020</conf-date>
          <conf-loc>Porto de Galinhas, Brazil</conf-loc>
          <pub-id pub-id-type="doi">10.1109/ismar50242.2020.00098</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Lin</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Tsai</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Fan</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Lan</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Eligibility for live, interactive otolaryngology telemedicine: 19-month experience before and during the COVID-19 pandemic in Taiwan</article-title>
          <source>Biomed J</source>
          <year>2021</year>
          <month>10</month>
          <volume>44</volume>
          <issue>5</issue>
          <fpage>582</fpage>
          <lpage>588</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2319-4170(21)00100-1"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.bj.2021.07.012</pub-id>
          <pub-id pub-id-type="medline">34371224</pub-id>
          <pub-id pub-id-type="pii">S2319-4170(21)00100-1</pub-id>
          <pub-id pub-id-type="pmcid">PMC8556875</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
