<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="review-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id><journal-id journal-id-type="publisher-id">jmir</journal-id><journal-id journal-id-type="index">1</journal-id><journal-title>Journal of Medical Internet Research</journal-title><abbrev-journal-title>J Med Internet Res</abbrev-journal-title><issn pub-type="epub">1438-8871</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v27i1e64207</article-id><article-id pub-id-type="doi">10.2196/64207</article-id><article-categories><subj-group subj-group-type="heading"><subject>Review</subject></subj-group></article-categories><title-group><article-title>Tai Chi Chuan Auxiliary Training Systems in Health and Rehabilitation: Scoping Review</article-title></title-group><contrib-group><contrib contrib-type="author" equal-contrib="yes"><name name-style="western"><surname>Liu</surname><given-names>Hong</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author" equal-contrib="yes"><name name-style="western"><surname>Li</surname><given-names>Huibiao</given-names></name><degrees>MM</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Huang</surname><given-names>Haoyu</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref 
ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Huang</surname><given-names>Jia</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Zhang</surname><given-names>Yanxin</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Chen</surname><given-names>Lidian</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="aff" rid="aff4">4</xref></contrib></contrib-group><aff id="aff1"><institution>College of Rehabilitation Medicine, Fujian University of Traditional Chinese Medicine</institution><addr-line>Fuzhou</addr-line><addr-line>Fujian</addr-line><country>China</country></aff><aff id="aff2"><institution>National-Local Joint Engineering Research Center of Rehabilitation Medicine Technology, Fujian University of Traditional Chinese Medicine</institution><addr-line>Fuzhou</addr-line><addr-line>Fujian</addr-line><country>China</country></aff><aff id="aff3"><institution>Department of Exercise Sciences, The University of Auckland</institution><addr-line>Newmarket</addr-line><addr-line>Auckland</addr-line><country>New Zealand</country></aff><aff id="aff4"><institution>The Institute of Rehabilitation Industry, Fujian University of Traditional Chinese Medicine</institution><addr-line>1 Qiuyang West Road</addr-line><addr-line>Fuzhou</addr-line><addr-line>Fujian</addr-line><country>China</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Schwartz</surname><given-names>Amy</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Aliriad</surname><given-names>Hilmy</given-names></name></contrib><contrib 
contrib-type="reviewer"><name name-style="western"><surname>Al-Jabali</surname><given-names>Rasha Najib</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Lidian Chen, PhD, The Institute of Rehabilitation Industry, Fujian University of Traditional Chinese Medicine, 1 Qiuyang West Road, Fuzhou, Fujian, 350122, China, 86 059122861815; <email>cld@fjtcm.edu.cn</email></corresp><fn fn-type="equal" id="equal-contrib1"><label>*</label><p>these authors contributed equally</p></fn></author-notes><pub-date pub-type="collection"><year>2025</year></pub-date><pub-date pub-type="epub"><day>22</day><month>12</month><year>2025</year></pub-date><volume>27</volume><elocation-id>e64207</elocation-id><history><date date-type="received"><day>11</day><month>07</month><year>2024</year></date><date date-type="accepted"><day>28</day><month>10</month><year>2025</year></date></history><copyright-statement>&#x00A9; Hong Liu, Huibiao Li, Haoyu Huang, Jia Huang, Yanxin Zhang, Lidian Chen. Originally published in the Journal of Medical Internet Research (<ext-link ext-link-type="uri" xlink:href="https://www.jmir.org">https://www.jmir.org</ext-link>), 22.12.2025. </copyright-statement><copyright-year>2025</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research (ISSN 1438-8871), is properly cited. 
The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://www.jmir.org/">https://www.jmir.org/</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://www.jmir.org/2025/1/e64207"/><abstract><sec><title>Background</title><p>Tai Chi Chuan (TCC), often described as &#x201C;moving meditation,&#x201D; is a traditional Chinese mind-body exercise suitable for individuals of all ages. Mounting evidence demonstrates that TCC can improve physical functions, promote physical activity, and positively impact health and longevity. However, systematic learning is hindered by insufficient teaching resources, difficulties in imparting expertise, and learning environment constraints. TCC auxiliary training systems, an innovative means of human-computer interaction, provide a potential solution.</p></sec><sec><title>Objective</title><p>This scoping review evaluates the research trends and clinical outcomes of TCC auxiliary training systems. Specifically, we compare the development tools, system design, and evaluation or validation processes used by different systems to guide future development in this research area.</p></sec><sec sec-type="methods"><title>Methods</title><p>Following the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) guidelines, electronic databases (PubMed, Embase, Scopus, IEEE Xplore, and ACM Digital Library) were systematically searched for studies in English from 2014 to 2024. Two reviewers independently extracted the data and used an adapted version of the Santos evaluation criteria to evaluate the quality of the included studies. 
The included studies were qualitatively summarized with respect to system design and evaluation verification.</p></sec><sec sec-type="results"><title>Results</title><p>Among the 2202 identified studies, 34 studies met the inclusion criteria, of which 24 were rated as medium to high quality. Desktop-based applications dominate the TCC auxiliary training system environment, comprising 38% (13/34) of the selected studies. The hardware and software components of TCC auxiliary training systems vary depending on the development objectives. Regarding system design, 76% (26/34) addressed all groups, with only a minority focusing on specific populations. Interaction design in TCC auxiliary training commonly incorporates human-computer interaction technologies, such as tactile, action, visual, speech, and multimodal interaction. Clinical validation is necessary to implement this system in clinical practice. Most reviewed studies were validated, 6 underwent acceptability validation, 21 underwent feasibility validation, and only 2 virtual reality&#x2013;based systems underwent clinical efficacy validation, demonstrating their effectiveness in improving cognitive abilities and motor functions in older adults.</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>The TCC auxiliary training system is an innovative health intervention in a rapidly advancing field. This scoping review, the first undertaken on this topic, systematically synthesizes current evidence regarding its design, applications, research trends, and clinical outcomes, thereby establishing a comprehensive foundation to guide and inform future research. However, the current evidence still faces issues such as methodological inconsistencies, insufficient sample diversity, and a lack of long-term effectiveness validation, which limit its generalizability and effectiveness in widespread applications. 
Future research should place greater emphasis on standardized reporting and applicability to diverse populations, and should foster ethical considerations and interdisciplinary collaboration. This will facilitate the widespread deployment of the TCC auxiliary training system and ensure its sustainable integration into the field of health intervention.</p></sec><sec><title>Trial Registration</title><p>PROSPERO CRD42024539375; https://www.crd.york.ac.uk/PROSPERO/view/CRD42024539375</p></sec></abstract><kwd-group><kwd>Tai Chi Chuan</kwd><kwd>human-computer interaction</kwd><kwd>physical activity</kwd><kwd>virtual reality</kwd><kwd>auxiliary training system</kwd><kwd>scoping review</kwd><kwd>PRISMA</kwd><kwd>Preferred Reporting Items for Systematic Reviews and Meta-Analyses</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>Physical inactivity is now recognized as the fourth leading cause of death and has been demonstrated to contribute to 40 chronic diseases [<xref ref-type="bibr" rid="ref1">1</xref>]. Importantly, physical inactivity often plays an independent role as a direct cause of speeding the loss of cardiovascular and strength fitness, shortening healthspan, and lowering the age of onset of the first chronic disease, which in turn decreases quality of life, increases health care costs, and accelerates mortality risk [<xref ref-type="bibr" rid="ref2">2</xref>]. 
In contrast, regular physical activity and exercise offer a wide range of health advantages, including enhanced physical fitness (notably in cardiorespiratory [<xref ref-type="bibr" rid="ref3">3</xref>] and musculoskeletal capacities [<xref ref-type="bibr" rid="ref4">4</xref>,<xref ref-type="bibr" rid="ref5">5</xref>]), improved cardiometabolic health [<xref ref-type="bibr" rid="ref6">6</xref>], cognitive function enhancement [<xref ref-type="bibr" rid="ref7">7</xref>], mental health support [<xref ref-type="bibr" rid="ref8">8</xref>], improved sleep quality [<xref ref-type="bibr" rid="ref9">9</xref>], and an overall elevation in quality of life [<xref ref-type="bibr" rid="ref10">10</xref>]. Furthermore, these activities contribute to reduced adiposity [<xref ref-type="bibr" rid="ref11">11</xref>], lower all-cause mortality rates [<xref ref-type="bibr" rid="ref12">12</xref>], and decreased health care expenditures [<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref14">14</xref>] and positively impact healthy longevity and well-being [<xref ref-type="bibr" rid="ref15">15</xref>].</p><p>Among various physical activities, Tai Chi, often described as &#x201C;moving meditation&#x201D; [<xref ref-type="bibr" rid="ref16">16</xref>], is a traditional Chinese mind-body exercise suitable for individuals of all ages (including individuals with chronic diseases, cognitive impairment, or motor disorders) to support health [<xref ref-type="bibr" rid="ref17">17</xref>-<xref ref-type="bibr" rid="ref19">19</xref>]. Compared to brisk walking, Tai Chi is more effective at improving motor function&#x2014;especially gait and balance&#x2014;in individuals with Parkinson disease [<xref ref-type="bibr" rid="ref20">20</xref>], reducing cardiovascular disease risk factors among adults with hypertension, and enhancing psychosocial well-being [<xref ref-type="bibr" rid="ref21">21</xref>]. 
Compared to fitness walking, long-term Tai Chi exercise is more effective at improving global cognitive function in older adults with type 2 diabetes and mild cognitive impairment [<xref ref-type="bibr" rid="ref22">22</xref>]. Compared to aerobic exercise, Tai Chi offers greater advantages in alleviating pain and anxiety and improving self-efficacy and cognitive coping strategies in patients with fibromyalgia, and in reducing systolic blood pressure in individuals with prehypertension [<xref ref-type="bibr" rid="ref23">23</xref>,<xref ref-type="bibr" rid="ref24">24</xref>]. Compared to stretching exercise, Tai Chi is more effective at reducing the incidence of fall-related injuries, improving balance in older postmenopausal women who are cancer survivors [<xref ref-type="bibr" rid="ref25">25</xref>], and decreasing injurious falls in older adults at high risk of falling [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref27">27</xref>]. Tai Chi is recommended by the &#x201C;World Guidelines for Falls Prevention and Management for Older Adults: A Global Initiative&#x201D; [<xref ref-type="bibr" rid="ref28">28</xref>]. Moreover, compared to resistance and stretching exercises, Tai Chi can reduce balance impairments in patients with mild-to-moderate Parkinson disease, with additional benefits of improved functional capacity and reduced falls [<xref ref-type="bibr" rid="ref29">29</xref>], and is now endorsed by the &#x201C;Clinical Practice Guideline From the American Physical Therapy Association&#x201D; [<xref ref-type="bibr" rid="ref30">30</xref>].</p><p>Various Tai Chi Chuan (TCC) learning methods can accommodate different learning preferences and environments. Traditional methods include personalized instruction through one-on-one coaching and structured group classes, which provide direct feedback and hands-on guidance from experienced practitioners. 
Alternatively, self-directed learning through instructional videos, books, and online resources offers flexibility and accessibility, allowing individuals to study at their own pace. However, practitioners often find personal coaching cost-prohibitive, while self-study is monotonous and lacks the immediate corrective feedback necessary for mastering the subtleties of TCC, making it difficult to sustain over the long term. With the advent of virtual reality (VR) technology, the immersive, interactive, and imaginative characteristics of VR can set up a learning environment beyond the screen&#x2019;s limitations, enhancing learners&#x2019; motivation and interest and producing ideal experiential results. The progress of VR technology has facilitated the emergence of the TCC learning system. A TCC auxiliary training system based on VR provides a safe, comfortable, and effective method to practice TCC through human-computer interactions with virtual interfaces. This system adapts to the needs and progress of each practitioner, providing a personalized rehabilitation experience and delivering comprehensive physical and psychological health benefits.</p><p>As early as 1997, Becker and Pentland [<xref ref-type="bibr" rid="ref31">31</xref>] developed a virtual interactive TCC system to relieve stress in patients with cancer. The system uses a vision-based motion capture system to track the user&#x2019;s head and hands, and the hidden Markov model was used to identify TCC movements and provide patient feedback. Since then, with the development of low-cost, highly cost-effective motion sensors such as Kinect and inertial sensors, more TCC auxiliary training systems based on VR and motion capture have been developed. Using such systems, TCC practitioners can follow the motions of the virtual coach to practice TCC and receive feedback from the system to adjust their practice in real time. 
The hardware of the TCC auxiliary training system based on VR usually includes TCC motion capture equipment, VR tools, and other commonly used basic computer hardware; the software requires game engines or modeling software to build VR scenes and virtual characters. The performance of the system depends on real-time bone tracking technology of the motion capture system, highly adaptive algorithms, good communication technology, and a real-time interactive VR environment. According to the existing research, this kind of TCC auxiliary training system can accelerate the learning process of practitioners and improve the quality of movement learning [<xref ref-type="bibr" rid="ref32">32</xref>]. In addition, such a system design is more valuable for rehabilitating older adults and patients with chronic diseases from the point of view of long-term exercise [<xref ref-type="bibr" rid="ref33">33</xref>,<xref ref-type="bibr" rid="ref34">34</xref>].</p><p>Therefore, the TCC auxiliary training system could serve as a promising alternative to traditional and group TCC practices. It is particularly significant in the current health promotion and rehabilitation context, offering unprecedented opportunities to increase accessibility to rehabilitation training, enhance the coverage of health interventions across regions and cultures, and foster collaborative practices among different health care and community organizations.</p><p>Although studies have shown that TCC auxiliary training systems have potential advantages in improving motor skills, increasing training interest, and enhancing physical and mental health, gaps remain in their development and research. On one hand, there are significant differences in hardware (sensor types and accuracy), software (engines and algorithms), and human-computer interaction design across systems, leading to a lack of comparability between studies. 
On the other hand, clinical validation is insufficient, as most systems have only been tested in small-sample experimental settings and lack clinical trials. Furthermore, due to rapid technological advancements (such as motion capture and VR algorithms) and the increasing demand for rehabilitation in an aging population, no systematic review has specifically addressed the development, design, and effectiveness evaluation of TCC auxiliary training systems.</p><p>This review aims to fill this research gap by analyzing studies related to TCC auxiliary training systems from the past decade (2014&#x2010;2024), exploring the existing gaps in their development and research, assessing current trends and clinical outcomes, and providing a foundation for future research. This scoping review addresses the following research questions (RQs):</p><list list-type="order"><list-item><p>RQ1: To identify the development tools used across different systems, including the hardware and software used in their development.</p></list-item><list-item><p>RQ2: To describe system designs, including development environments, applications, and human-computer interactions, to identify research gaps and suggest design improvements for future design work.</p></list-item><list-item><p>RQ3: To identify the evaluation or verification of each system, including whether the system has been evaluated or verified and how it has been evaluated or verified, providing evidence for its efficacy in health promotion.</p></list-item><list-item><p>RQ4: To provide recommendations for further investigations in this research field.</p></list-item></list><p>This study aims to address these questions by summarizing the development tools, design, human-computer interaction, and clinical validation related to the &#x201C;TCC auxiliary training system,&#x201D; thus filling a gap in existing literature. 
Furthermore, it provides an integrated analytical framework that combines hardware, software, interaction design, and clinical validation, offering reference standards and benchmarks for future research and system development. Finally, it combines theoretical insights with practical considerations, providing evidence-based support for researchers, clinicians, and policymakers in clinical decision-making and the development of relevant health policies.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Literature Search Strategy</title><p>This protocol for the review was implemented following the PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) statement [<xref ref-type="bibr" rid="ref35">35</xref>], which has been registered in the PROSPERO (International Prospective Register of Systematic Reviews) database under the ID CRD42024539375. Literature was retrieved from the following 5 databases: PubMed, Embase, Scopus, IEEE Xplore, and ACM Digital Library. The focus of this paper is the application, research trends, and clinical efficacy of the TCC auxiliary training system. There are many spellings of Medical Subject Headings terms for TCC, including Taiji, Tai Chi, TCC, and T&#x2019;ai Chi. We modified the retrieval strategy according to different databases, and the search terms of all databases are provided in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>. This study summarizes the studies published from January 2014 to May 2024.</p></sec><sec id="s2-2"><title>Inclusion and Exclusion Criteria</title><p>Studies eligible to be included in this review had to meet the following inclusion criteria: (1) studies describing a TCC training system, (2) the search language is limited to English, and (3) studies published in the past 10 years. 
The exclusion criteria were (1) systematic reviews or literature reviews, (2) books or study comments, (3) theses, (4) incomplete or short papers (eg, posters, tutorials, and technical reports), and (5) duplicate primary studies.</p></sec><sec id="s2-3"><title>Study Selection Process</title><p>This search included journal and conference papers. The study screening process consisted of the following steps: (1) retrieving studies from January 2014 to May 2024, (2) screening titles and abstracts of the remaining studies after removing duplicates, and (3) reviewers reading the full texts and selecting studies according to the inclusion and exclusion criteria. If a journal study covers the content reported in the previous conference paper or degree thesis, the journal paper was given precedence over the conference paper and degree thesis.</p></sec><sec id="s2-4"><title>Assessment of Study Quality</title><p>After establishing the inclusion and exclusion criteria, the next step was to identify quality criteria to strengthen the extraction of quantitative and qualitative data for the synthesis and results analysis. We established a list of 5 quality standards (<xref ref-type="table" rid="table1">Table 1</xref>) based on Santos et al [<xref ref-type="bibr" rid="ref36">36</xref>], aligning with the objectives of this review on TCC auxiliary training systems. Each study was rated by 2 authors (Huibiao Li and HH) according to these 5 specific criteria. Finally, the final score of each researcher was collated by Hong Liu, and any differences were checked. Any discrepancies between the raters were resolved through negotiation. Each criterion in the list is rated as follows: yes=1 point, no=0 points, and partial=0.5 points. 
The total score was derived by summing the scores of the questions, reflecting the overall quality of the included literature: 0&#x2010;2 (low quality), 3&#x2010;4 (medium quality), and 5 (high quality).</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Quality evaluation (QE) for this study.</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Item</td><td align="left" valign="bottom">Description</td></tr></thead><tbody><tr><td align="left" valign="top">QE1</td><td align="left" valign="top">The research scheme is described clearly, and the methods and techniques are explained.</td></tr><tr><td align="left" valign="top">QE2</td><td align="left" valign="top">The auxiliary training system of the TCC<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup> is clearly described.</td></tr><tr><td align="left" valign="top">QE3</td><td align="left" valign="top">A clear report on the virtual environment used.</td></tr><tr><td align="left" valign="top">QE4</td><td align="left" valign="top">A precise evaluation or verification of the system is carried out.</td></tr><tr><td align="left" valign="top">QE5</td><td align="left" valign="top">Detailed case study results are provided.</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>TCC: Tai Chi Chuan.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s2-5"><title>Data Extraction Process</title><p>After identifying eligible publications, all relevant data were collected in Microsoft Excel using a structured coding scheme. Data were extracted independently by 2 authors (Huibiao Li and HH). The collected variables included title, sample size, year, country, research methods and techniques, system description, type of virtual environment, system evaluation and validation, case study results, development tools, interaction design, and categories of TCC. 
Detailed descriptions were provided for development tools, system design, evaluation and validation methods, and clinical efficacy studies. Any discrepancies were resolved through discussion and re-evaluation of the relevant literature.</p></sec><sec id="s2-6"><title>Data Synthesis and Analysis</title><p>The data synthesis process used a narrative approach to analyze and present the findings of the included studies. An overview of the development tools used by different systems, descriptions of system designs, and evaluations or validations for each system were provided to facilitate cross-study comparisons. This study used summarized data wherever possible and followed the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) guidelines.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Database Search and Paper Lists</title><p><xref ref-type="fig" rid="figure1">Figure 1</xref> shows an overview of the study selection results at different stages. Of the 2202 studies retrieved through the search strategies, 34 papers were selected for inclusion in this review. <xref ref-type="table" rid="table2">Table 2</xref> shows the quality of the studies included in the review. According to our predefined quality assessment criteria, 2 of the 34 included studies were considered high quality, 22 were considered medium quality, and 10 were considered low quality. In summary, 94% (32/34) of the research schemes described the methods and techniques used, 82% (28/34) described the TCC auxiliary training system, 32% (11/34) reported the virtual environment used, 50% (17/34) evaluated or verified the system, and 41% (14/34) provided detailed case study results.</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) flowchart of the results from the literature search. 
TCC: Tai Chi Chuan.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="jmir_v27i1e64207_fig01.png"/></fig><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Risk of bias assessment of the included studies.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Study</td><td align="left" valign="bottom">QE1<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup></td><td align="left" valign="bottom">QE2<sup><xref ref-type="table-fn" rid="table2fn2">b</xref></sup></td><td align="left" valign="bottom">QE3<sup><xref ref-type="table-fn" rid="table2fn3">c</xref></sup></td><td align="left" valign="bottom">QE4<sup><xref ref-type="table-fn" rid="table2fn4">d</xref></sup></td><td align="left" valign="bottom">QE5<sup><xref ref-type="table-fn" rid="table2fn5">e</xref></sup></td><td align="left" valign="bottom">Sum</td><td align="left" valign="bottom">Quality</td></tr></thead><tbody><tr><td align="left" valign="top">Lee et al [<xref ref-type="bibr" rid="ref37">37</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Chen [<xref ref-type="bibr" rid="ref38">38</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Iwaanaguchi et al [<xref ref-type="bibr" rid="ref39">39</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" 
valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Bian et al [<xref ref-type="bibr" rid="ref40">40</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref41">41</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref42">42</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Xue et al [<xref ref-type="bibr" rid="ref43">43</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">2</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Liang et al [<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">2</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Guimar&#x00E3;es et al [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td 
align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Delfa et al [<xref ref-type="bibr" rid="ref46">46</xref>]</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Bian et al [<xref ref-type="bibr" rid="ref47">47</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Hsieh et al [<xref ref-type="bibr" rid="ref48">48</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">2</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Lin et al [<xref ref-type="bibr" rid="ref49">49</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Yu and Xiong [<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">2</td><td 
align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Kao et al [<xref ref-type="bibr" rid="ref51">51</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">5</td><td align="left" valign="top">High</td></tr><tr><td align="left" valign="top">Zhu et al [<xref ref-type="bibr" rid="ref52">52</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Hung et al [<xref ref-type="bibr" rid="ref53">53</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">2</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Chen et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Chen et al [<xref ref-type="bibr" rid="ref55">55</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td 
align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Tharatipyakul et al [<xref ref-type="bibr" rid="ref56">56</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Liu et al [<xref ref-type="bibr" rid="ref57">57</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Li and Wang [<xref ref-type="bibr" rid="ref58">58</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">2</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Jan et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Gao et al [<xref ref-type="bibr" rid="ref60">60</xref>]</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">Low</td></tr><tr><td align="left" 
valign="top">Wei et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Li et al [<xref ref-type="bibr" rid="ref62">62</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Li et al [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Kim et al [<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Tian et al [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">4</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Wang et al [<xref ref-type="bibr" rid="ref66">66</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" 
valign="top">0</td><td align="left" valign="top">0</td><td align="left" valign="top">2</td><td align="left" valign="top">Low</td></tr><tr><td align="left" valign="top">Wang and Deng [<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Kanchanapaetnukul et al [<xref ref-type="bibr" rid="ref68">68</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">1</td><td align="left" valign="top">0</td><td align="left" valign="top">3</td><td align="left" valign="top">Medium</td></tr><tr><td align="left" valign="top">Tian et al [<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">1</td><td align="left" valign="top">5</td><td align="left" valign="top">High</td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>QE1: the research scheme is described clearly, and the methods and techniques are explained.</p></fn><fn id="table2fn2"><p><sup>b</sup>QE2: the auxiliary training system of the Tai Chi Chuan (TCC) is clearly described.</p></fn><fn id="table2fn3"><p><sup>c</sup>QE3: a clear report on the virtual environment used.</p></fn><fn id="table2fn4"><p><sup>d</sup>QE4: a precise evaluation or verification of the system is carried out.</p></fn><fn id="table2fn5"><p><sup>e</sup>QE5: detailed case study results are provided.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-2"><title>Types of Publications</title><p><xref ref-type="table" rid="table3">Table 3</xref> shows the year, country (based on the research 
unit of the first author), and type of publication included in the review. In the research we included, 65% (22/34) of the studies were conference papers, and the remainder were journal papers. Since TCC is a traditional martial art originating in China, 76% (26/34) of the studies are from China.</p><p><xref ref-type="fig" rid="figure2">Figure 2</xref> shows the temporal trend of the development of the TCC auxiliary training system from 2014 to 2024. This trend shows fluctuations in research over the past decade. In terms of time trends, research on TCC auxiliary training systems has been increasing in the past 10 years, with most system development research focusing on the period before and after the outbreak of the COVID-19 pandemic (59%, 20/34).</p><table-wrap id="t3" position="float"><label>Table 3.</label><caption><p>Types of publications.</p></caption><table id="table3" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Publication type and year</td><td align="left" valign="bottom">China</td><td align="left" valign="bottom">Japan</td><td align="left" valign="bottom">United States</td><td align="left" valign="bottom">Korea</td><td align="left" valign="bottom">Singapore</td><td align="left" valign="bottom">Portugal</td><td align="left" valign="bottom">Thailand</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="8">Conference</td></tr><tr><td align="left" valign="top">&#x2003;2014</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref37">37</xref>]</td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table3fn1">a</xref></sup></td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table3fn1">a</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2015</td><td align="left" 
valign="top">[<xref ref-type="bibr" rid="ref38">38</xref>]</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref39">39</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2016</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref40">40</xref>,<xref ref-type="bibr" rid="ref41">41</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2017</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref42">42</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2018</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref49">49</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref46">46</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2019</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref53">53</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" 
valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2020</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref58">58</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref56">56</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2021</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2022</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref60">60</xref>-<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2023</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref68">68</xref>]</td></tr><tr><td align="left" valign="top" colspan="8">Journal</td></tr><tr><td align="left" 
valign="top">&#x2003;2017</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref43">43</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2018</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref48">48</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2019</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2020</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref57">57</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2022</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" 
valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2023</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;2024</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr></tbody></table><table-wrap-foot><fn id="table3fn1"><p><sup>a</sup>Not available.</p></fn></table-wrap-foot></table-wrap><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>Time trend of research on Tai Chi Chuan (TCC) auxiliary training systems.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="jmir_v27i1e64207_fig02.png"/></fig></sec><sec id="s3-3"><title>RQ1: Comparison of Development Tools Used in Different TCC Auxiliary Training Systems</title><sec id="s3-3-1"><title>System Development Environment Design</title><p>This paper introduces the development tools of the TCC auxiliary training system from the hardware and software point of view. The hardware and software included in the 34 studies were analyzed. The hardware describes motion capture devices, VR tools, and other major hardware, as detailed in <xref ref-type="table" rid="table4">Table 4</xref>. 
The necessary devices, such as computers, keyboards, and mice, are not listed as hardware.</p><p>Most desktop application environments do not require hardware beyond commonly used devices, and keyboards and mice can be used to set up and simulate TCC virtual coaches in various software programs [<xref ref-type="bibr" rid="ref70">70</xref>]. The presentation type of VR environment can be divided into immersive and nonimmersive types. Immersive VR uses a cave automatic virtual environment (CAVE), large screen projection, or helmet-mounted displays (HMDs) to present a VR environment, which can provide users with a strong sense of immersion [<xref ref-type="bibr" rid="ref71">71</xref>]. In contrast, nonimmersive VR uses a desktop display system to present a VR environment, which does not provide a high degree of immersion [<xref ref-type="bibr" rid="ref72">72</xref>]. Among the 34 studies we included, desktop-based environments are the most widely used in current TCC auxiliary training systems, accounting for 38% (13/34) of the selected studies. This is followed by HMD-based environments, which account for 26% (9/34). Web display and augmented reality (AR) system applications are less common, each representing 15% (5/34), while CAVE applications are the least common, accounting for 3% (1/34).</p><p>In the implementation tools of the VR environment for HMD systems, different types of devices are used, such as the Epson MOVERIO BT-200, Microsoft HoloLens, HTC VIVE, Oculus Quest, and Oculus Rift devices [<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref57">57</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref64">64</xref>]. 
The AR system also uses a portable Microsoft HoloLens as an AR device to develop the TCC auxiliary training system [<xref ref-type="bibr" rid="ref53">53</xref>].</p><p>In addition, the development purposes of TCC auxiliary training systems are different, and the hardware is also different. For example, the correct distribution of the center of gravity in TCC practice is essential, so they added plantar pressure&#x2013;sensing devices to the TCC auxiliary training system to help practitioners perform accurate TCC movements with the correct center of gravity distribution [<xref ref-type="bibr" rid="ref38">38</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref51">51</xref>]. Moreover, TCC exercises frequently incorporate metaphorical imagery. A developed TCC auxiliary training system, which uses drones to simulate gestures akin to &#x201C;waving clouds,&#x201D; can enhance the fluidity of movements [<xref ref-type="bibr" rid="ref46">46</xref>].</p><table-wrap id="t4" position="float"><label>Table 4.</label><caption><p>Development tools.</p></caption><table id="table4" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Study</td><td align="left" valign="bottom">Application environment</td><td align="left" valign="bottom">Motion capture</td><td align="left" valign="bottom">VR<sup><xref ref-type="table-fn" rid="table4fn1">a</xref></sup> tools</td><td align="left" valign="bottom">Other</td><td align="left" valign="bottom">Software</td></tr></thead><tbody><tr><td align="left" valign="top">Chen et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">VR and HMD<sup><xref ref-type="table-fn" rid="table4fn2">b</xref></sup> and CAVE<sup><xref ref-type="table-fn" rid="table4fn3">c</xref></sup></td><td align="left" valign="top">Kinect</td><td align="left" valign="top">A 6-sided cubic screen and Oculus Rift</td><td align="left" valign="top">NR<sup><xref ref-type="table-fn" 
rid="table4fn4">d</xref></sup></td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Iwaanaguchi et al [<xref ref-type="bibr" rid="ref39">39</xref>]</td><td align="left" valign="top">HMD</td><td align="left" valign="top">MAC3D</td><td align="left" valign="top">Epson MOVERIO BT-200</td><td align="left" valign="top">NR</td><td align="left" valign="top">Maya and Unity3D</td></tr><tr><td align="left" valign="top">Bian et al [<xref ref-type="bibr" rid="ref40">40</xref>]</td><td align="left" valign="top">VR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref42">42</xref>]</td><td align="left" valign="top">MR<sup><xref ref-type="table-fn" rid="table4fn5">e</xref></sup>-HMD</td><td align="left" valign="top">Parrot Bebop 2</td><td align="left" valign="top">Microsoft HoloLens</td><td align="left" valign="top">NR</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Xue et al [<xref ref-type="bibr" rid="ref43">43</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Lin et al [<xref ref-type="bibr" rid="ref49">49</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">Pressure-sensing shoes</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Liang et al [<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top">VR and AR<sup><xref ref-type="table-fn" rid="table4fn6">f</xref></sup></td><td align="left" valign="top">Kinect</td><td align="left" valign="top">Oculus Rift</td><td align="left" 
valign="top">Hadoop cluster, GPU server, and Foot-pressure pad and Activator</td><td align="left" valign="top">Unity 3D, TensorFlow,<break/>Kinect Studio and<break/>Gesture recog. toolkit</td></tr><tr><td align="left" valign="top">Yu and Xiong [<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect and Xsens MVN BIOMECH</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Kao et al [<xref ref-type="bibr" rid="ref51">51</xref>]</td><td align="left" valign="top">MR-HMD</td><td align="left" valign="top">Vicon</td><td align="left" valign="top">Microsoft HoloLens</td><td align="left" valign="top">Pressure-sensing insoles</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Zhu et al [<xref ref-type="bibr" rid="ref52">52</xref>]</td><td align="left" valign="top">HMD</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">Google Cardboard and Smartphone</td><td align="left" valign="top">NR</td><td align="left" valign="top">Google VR SDK</td></tr><tr><td align="left" valign="top">Hung et al [<xref ref-type="bibr" rid="ref53">53</xref>]</td><td align="left" valign="top">AR</td><td align="left" valign="top">Vicon and Sony SmartWatch 3</td><td align="left" valign="top">Microsoft HoloLens</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Chen et al [<xref ref-type="bibr" rid="ref55">55</xref>]</td><td align="left" valign="top">AR</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Tharatipyakul et al [<xref ref-type="bibr" rid="ref56">56</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Logitech C922 Pro 
webcam</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">JavaScript</td></tr><tr><td align="left" valign="top">Liu et al [<xref ref-type="bibr" rid="ref57">57</xref>]</td><td align="left" valign="top">HMD</td><td align="left" valign="top">Vicon and Noitom</td><td align="left" valign="top">HTC VIVE</td><td align="left" valign="top">NR</td><td align="left" valign="top">Maya and Unreal Engine 4</td></tr><tr><td align="left" valign="top">Li and Wang [<xref ref-type="bibr" rid="ref58">58</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NAO robot, STM32, DC deceleration motor, and RFP pressure sensor</td><td align="left" valign="top">Python, C language, and MATLAB</td></tr><tr><td align="left" valign="top">Gao et al [<xref ref-type="bibr" rid="ref60">60</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">Screen display</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Guimar&#x00E3;es et al [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td align="left" valign="top">Desktop, Web, and Mobile</td><td align="left" valign="top">Inertial Sensors</td><td align="left" valign="top">NR</td><td align="left" valign="top">Bluetooth&#x00AE; Low Energy</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">La et al [<xref ref-type="bibr" rid="ref46">46</xref>]</td><td align="left" valign="top">Micro UAVs<sup><xref ref-type="table-fn" rid="table4fn7">g</xref></sup></td><td align="left" valign="top">Qualisys</td><td align="left" valign="top">NR</td><td align="left" valign="top">Crazyflie 2.0s and LED</td><td align="left" valign="top">Python</td></tr><tr><td align="left" valign="top">Bian et al [<xref ref-type="bibr" rid="ref47">47</xref>]</td><td 
align="left" valign="top">Desktop</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Smartphone and Separate display</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref41">41</xref>]</td><td align="left" valign="top">AR-HMD</td><td align="left" valign="top">Leap Motion, Myo, and Chest Strap</td><td align="left" valign="top">Oculus Rift DK2</td><td align="left" valign="top">NR</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Tian et al [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">VR-HMD and MR-HMD</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Wang and Deng [<xref ref-type="bibr" rid="ref66">66</xref>]</td><td align="left" valign="top">Web</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">Gyroscope, Accelerometer, Magnetometer, WIFI module, High-frequency filtering circuit, Signal conditioning circuit, A/D reference power circuit, E2PROM data storage circuit and Watchdog circuit</td><td align="left" valign="top">Unity3D</td></tr><tr><td align="left" valign="top">Wang and Deng [<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">Web</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Unity3D, HTML5, C language, Kinect SDK, and Microsoft Visual Studio 2010</td></tr><tr><td align="left" valign="top">Kanchanapaetnukul et al [<xref ref-type="bibr" rid="ref68">68</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" 
valign="top">NR</td></tr><tr><td align="left" valign="top">Wei et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td><td align="left" valign="top">Web</td><td align="left" valign="top">Industrial camera</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Li et al [<xref ref-type="bibr" rid="ref62">62</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Li et al [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">Web</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">MySQL and PHP</td></tr><tr><td align="left" valign="top">Jan et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">AR and OST-HMD</td><td align="left" valign="top">Webcam</td><td align="left" valign="top">Microsoft HoloLens</td><td align="left" valign="top">3-Axis Magnetic Sensor QMC5883L and Raspberry Pi</td><td align="left" valign="top">OpenPose and Lifting from the Deep</td></tr><tr><td align="left" valign="top">Lee et al [<xref ref-type="bibr" rid="ref37">37</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Tian et al [<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">TCG<sup><xref ref-type="table-fn" rid="table4fn8">h</xref></sup>, VR, and MR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Pico Neo3 and Pico4</td><td align="left" valign="top">TP-LINK AX6000</td><td align="left" valign="top">Unity3D and Mirror 
Plugin</td></tr><tr><td align="left" valign="top">Kim et al [<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="top">VR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Oculus Quest</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Hsieh et al [<xref ref-type="bibr" rid="ref48">48</xref>]</td><td align="left" valign="top">VR</td><td align="left" valign="top">Kinect</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td></tr><tr><td align="left" valign="top">Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">RGB-D</td><td align="left" valign="top">NR</td><td align="left" valign="top">Screen display</td><td align="left" valign="top">MakeHuman</td></tr><tr><td align="left" valign="top">Chen [<xref ref-type="bibr" rid="ref38">38</xref>]</td><td align="left" valign="top">Desktop</td><td align="left" valign="top">NR</td><td align="left" valign="top">NR</td><td align="left" valign="top">Force sensor modules, microcontroller, and wireless data transmission module</td><td align="left" valign="top">NR</td></tr></tbody></table><table-wrap-foot><fn id="table4fn1"><p><sup>a</sup>VR: virtual reality.</p></fn><fn id="table4fn2"><p><sup>b</sup>HMD: helmet-mounted display.</p></fn><fn id="table4fn3"><p><sup>c</sup>CAVE: cave automatic virtual environment.</p></fn><fn id="table4fn4"><p><sup>d</sup>NR: not reported.</p></fn><fn id="table4fn5"><p><sup>e</sup>MR: mixed reality.</p></fn><fn id="table4fn6"><p><sup>f</sup>AR: augmented reality.</p></fn><fn id="table4fn7"><p><sup>g</sup>UAV: uncrewed aerial vehicle.</p></fn><fn id="table4fn8"><p><sup>h</sup>TCG: traditional coach guidance.</p></fn></table-wrap-foot></table-wrap><p>The TCC virtual coach simulation is driven by motion data captured by human TCC experts. 
Commonly used motion capture systems can be divided into 2 categories: nonoptical systems and optical systems. Of the 34 studies we included, 3 used a combination of optical and nonoptical devices for accurate TCC motion capture; for example, Kinect and Xsens were used to capture the movement of each TCC practitioner [<xref ref-type="bibr" rid="ref50">50</xref>]. Among the optical devices, the unmarked optical capture device, Kinect, is the most widely used, and 44% (15/34) of the TCC auxiliary training systems used Kinect as the motion capture device. Three studies used a tag-based Vicon system to capture TCC instructors&#x2019; movements [<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref53">53</xref>,<xref ref-type="bibr" rid="ref57">57</xref>].</p><p>In terms of software, Unity3D (David Helgason) is the most commonly used game engine for developing TCC auxiliary training systems, accounting for 38% (13/34) of the included studies. Unity3D is a cross-platform integrated game engine that mainly provides 3D animation, virtual scenes, physical computing, and other functions, which helps build a high-quality and high-fidelity virtual environment [<xref ref-type="bibr" rid="ref73">73</xref>]. Maya (Autodesk, Inc.) is also used as 3D modeling software [<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref57">57</xref>].</p></sec></sec><sec id="s3-4"><title>RQ2: Description of the System Design for the TCC Auxiliary Training System</title><sec id="s3-4-1"><title>System Field Design</title><p><xref ref-type="table" rid="table5">Table 5</xref> describes the TCC auxiliary training system, the target population, and the TCC style used in the system. 
In the studies we included, 76% (26/34) of the TCC auxiliary training systems were aimed at all groups of people; only 5 systems were developed specifically for older adults [<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref68">68</xref>], 1 system was developed for older adults with cognitive impairment [<xref ref-type="bibr" rid="ref48">48</xref>], and 1 system was developed for people with dyskinesia [<xref ref-type="bibr" rid="ref37">37</xref>].</p><table-wrap id="t5" position="float"><label>Table 5.</label><caption><p>System field design.</p></caption><table id="table5" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Category</td><td align="left" valign="bottom">Study</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="2">Target population</td></tr><tr><td align="left" valign="top">&#x2003;The TCC<sup><xref ref-type="table-fn" rid="table5fn1">a</xref></sup> trainee</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref38">38</xref>-<xref ref-type="bibr" rid="ref44">44</xref>,<xref ref-type="bibr" rid="ref46">46</xref>,<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref51">51</xref>-<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref56">56</xref>-<xref ref-type="bibr" rid="ref63">63</xref>,<xref ref-type="bibr" rid="ref65">65</xref>,<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref69">69</xref>]</td></tr><tr><td align="left" valign="top">&#x2003;Older adults and older adults with cognitive impairment</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref48">48</xref>,<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" 
rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref68">68</xref>]</td></tr><tr><td align="left" valign="top">&#x2003;People with movement disorders</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref37">37</xref>]</td></tr><tr><td align="left" valign="top" colspan="2">The types of TCC</td></tr><tr><td align="left" valign="top">&#x2003;24-form TCC and 18-form TCC</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref40">40</xref>,<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref44">44</xref>,<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref60">60</xref>,<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref68">68</xref>]</td></tr><tr><td align="left" valign="top">&#x2003;8-form TCC, Yang-style TCC, and Yang, Ye, Chen, or Wu-style TCC</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref61">61</xref>,<xref ref-type="bibr" rid="ref62">62</xref>-<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref67">67</xref>,<xref ref-type="bibr" rid="ref69">69</xref>]</td></tr><tr><td align="left" valign="top">&#x2003;Tai Chi Push Hands</td><td align="left" valign="top">[<xref ref-type="bibr" rid="ref58">58</xref>]</td></tr></tbody></table><table-wrap-foot><fn id="table5fn1"><p><sup>a</sup>TCC: Tai Chi Chuan.</p></fn></table-wrap-foot></table-wrap><p>There are several variations in TCC, with the 24-form style being one of the most popular [<xref ref-type="bibr" rid="ref39">39</xref>]. 
In developing TCC auxiliary training systems, approximately 24% (8/34) of the systems chose to capture the 24-form TCC demonstrated by TCC experts as a reference motion [<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref40">40</xref>,<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref44">44</xref>,<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref60">60</xref>,<xref ref-type="bibr" rid="ref66">66</xref>]. Six system developers implemented the Yang-style TCC in these systems [<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref62">62</xref>-<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref67">67</xref>], while 2 system developers chose the 8-form TCC [<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref61">61</xref>]. In addition, a system developer selected the 18-form TCC as a reference [<xref ref-type="bibr" rid="ref68">68</xref>]. 
Some developers opted to construct a Tai Chi Push Hands robotic system [<xref ref-type="bibr" rid="ref58">58</xref>] or create a novel portable TCC group training system, which integrates various TCC forms [<xref ref-type="bibr" rid="ref69">69</xref>].</p></sec><sec id="s3-4-2"><title>System Interaction Design</title><p><xref ref-type="table" rid="table6">Table 6</xref> describes the human-computer interaction technologies commonly used in TCC auxiliary training systems, including tactile interaction, action interaction, visual interaction, speech interaction, VR input, and multimodal interaction technology.</p><table-wrap id="t6" position="float"><label>Table 6.</label><caption><p>System interaction design.</p></caption><table id="table6" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Interactive technology and study</td><td align="left" valign="bottom">Algorithm</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="2">Motion interaction (posture recognition)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Chen et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">Quaternion-based with DTW<sup><xref ref-type="table-fn" rid="table6fn1">a</xref></sup> extended from algorithms,<break/>curvature property in space curve extended from algorithms, and tensor decomposition&#x2013;based extended from algorithms.</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Xue et al [<xref ref-type="bibr" rid="ref43">43</xref>]</td><td align="left" valign="top">DTW algorithm</td></tr><tr><td align="left" valign="top">&#x2003;Liang et al [<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top">Approximate entropy or sample entropy</td></tr><tr><td align="left" valign="top">&#x2003;Lin et al [<xref ref-type="bibr" 
rid="ref49">49</xref>]</td><td align="left" valign="top">DTW algorithm</td></tr><tr><td align="left" valign="top">&#x2003;Yu and Xiong [<xref ref-type="bibr" rid="ref50">50</xref>], Hung et al [<xref ref-type="bibr" rid="ref53">53</xref>], and Wang and Deng [<xref ref-type="bibr" rid="ref66">66</xref>]</td><td align="left" valign="top">DTW algorithm</td></tr><tr><td align="left" valign="top">&#x2003;Li et al [<xref ref-type="bibr" rid="ref62">62</xref>]</td><td align="left" valign="top">Deep-learning model</td></tr><tr><td align="left" valign="top">&#x2003;Kanchanapaetnukul et al [<xref ref-type="bibr" rid="ref68">68</xref>]</td><td align="left" valign="top">BPNN<sup><xref ref-type="table-fn" rid="table6fn2">b</xref></sup></td></tr><tr><td align="left" valign="top">&#x2003;Wei et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td><td align="left" valign="top">By applying YOLOv4<sup><xref ref-type="table-fn" rid="table6fn3">c</xref></sup>, TSDNN<sup><xref ref-type="table-fn" rid="table6fn4">d</xref></sup>, and PRNN<sup><xref ref-type="table-fn" rid="table6fn5">e</xref></sup> to the obtained human body images, the system detects 17 human body keypoints.</td></tr><tr><td align="left" valign="top">&#x2003;Tian et al [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">The average Euclidean distance between the hand positions of the standard movements and the learner&#x2019;s movements across all frames.</td></tr><tr><td align="left" valign="top">&#x2003;Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">CNN<sup><xref ref-type="table-fn" rid="table6fn6">f</xref></sup> based on a set of algorithms that model high-level abstraction in data.</td></tr><tr><td align="left" valign="top">&#x2003;Jan et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">DTW algorithm and Gaussian function&#x2013;based similarity metric to compare the aligned signal.</td></tr><tr><td 
align="left" valign="top" colspan="2">Real-time visual interaction</td></tr><tr><td align="char" char="." valign="top">&#x2003;[<xref ref-type="bibr" rid="ref38">38</xref>,<xref ref-type="bibr" rid="ref43">43</xref>-<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref55">55</xref>-<xref ref-type="bibr" rid="ref57">57</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref60">60</xref>,<xref ref-type="bibr" rid="ref62">62</xref>-<xref ref-type="bibr" rid="ref65">65</xref>,<xref ref-type="bibr" rid="ref68">68</xref>,<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="char" char="." valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table6fn7">g</xref></sup></td></tr><tr><td align="left" valign="top" colspan="2">Multimode interaction</td></tr><tr><td align="left" valign="top">&#x2003;Tactile and Visual [<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top"/></tr><tr><td align="left" valign="top">&#x2003;Wearable sensors, Visual and Auditory [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;Smartphone-based user interface and Visual [<xref ref-type="bibr" rid="ref47">47</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;Virtual keyboard technology, Motion, Speech and Visual [<xref ref-type="bibr" rid="ref57">57</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;Voice, Visual, Hidden panel and Hot key [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;Voice, Visual and Background music playback [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" 
valign="top">&#x2003;Voice, rainbow trailing effects, hand models and particle tracks [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;Visual, Touch control, Somatosensory Interaction, Phone interaction and Emotional interaction [<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">&#x2003;Button and Visual [<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">&#x2014;</td></tr></tbody></table><table-wrap-foot><fn id="table6fn1"><p><sup>a</sup>DTW: dynamic time warping.</p></fn><fn id="table6fn2"><p><sup>b</sup>BPNN: backpropagation neural network.</p></fn><fn id="table6fn3"><p><sup>c</sup>YOLO: you only look once.</p></fn><fn id="table6fn4"><p><sup>d</sup>TSDNN: time series deep neural network.</p></fn><fn id="table6fn5"><p><sup>e</sup>PRNN: pose regression neural network.</p></fn><fn id="table6fn6"><p><sup>f</sup>CNN: convolutional neural network.</p></fn><fn id="table6fn7"><p><sup>g</sup>Not available.</p></fn></table-wrap-foot></table-wrap><p>Calculating the similarity of recorded TCC movements to a standard template is necessary to determine the interaction between TCC practitioners and the TCC auxiliary training system. In the current system design, the common algorithm for TCC posture recognition is the body posture recognition method based on template matching. 
Among these methods, the dynamic time warping (DTW) algorithm is mostly used [<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref53">53</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref66">66</xref>], which calculates the proximity between the test sample and the standard sample by extending and shortening the time series, establishes a time-calibration matching path between the test sample and the standard sample, and identifies the path with the minimum cumulative distance between the 2 samples in the matching process as the optimal path. Developers have extended 2 methodological approaches to objectively evaluate TCC posture sequences. First, they enhanced a quaternion-based similarity assessment method, incorporating DTW and the curvature properties of spatial curves to improve motion sequence alignment. Second, they extended a tensor decomposition&#x2013;based similarity assessment technique to capture multidimensional posture features for more accurate evaluation [<xref ref-type="bibr" rid="ref32">32</xref>]. These extensions allow both quaternion-based similarity assessment with DTW and tensor decomposition&#x2013;based similarity assessment to objectively evaluate TCC posture sequences.</p><p>In addition, some researchers used approximate entropy or sample entropy to measure the time series distance or dissimilarity between users and the TC master [<xref ref-type="bibr" rid="ref44">44</xref>]. In recent years, the TCC action quality assessment has gradually been achieved through machine learning assessment models. 
These models use motion characteristics captured by computer vision combined with various neural networks, such as backpropagation neural networks, time series deep neural networks, convolutional neural networks, and pose regression neural networks, to facilitate TCC action recognition and assessment [<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref61">61</xref>,<xref ref-type="bibr" rid="ref62">62</xref>,<xref ref-type="bibr" rid="ref68">68</xref>].</p><p>Excellent human-computer interaction design usually requires real-time feedback, and timely feedback can improve the user experience of a TCC auxiliary training system. Among the 34 studies included in this review, 62% (21/34) of the studies showed that the TCC auxiliary training system provides real-time visual feedback. Among the 21 studies that provided real-time visual feedback, 16 studies provided training feedback for TCC practitioners in the form of visual feedback, and 4 of them provided feedback in the form of scores [<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref57">57</xref>,<xref ref-type="bibr" rid="ref59">59</xref>]. In TCC practice, the correct center-of-gravity distribution of each motion is essential. Therefore, 4 system developers visualize the center-of-gravity distribution of practitioners practicing TCC via visual feedback to help practitioners perform TCC movements with the correct center of gravity [<xref ref-type="bibr" rid="ref38">38</xref>,<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref51">51</xref>].</p><p>TCC is a meditative practice where practitioners execute forms to unlock chi and allow it to flow within their bodies. 
Consequently, some researchers transformed TCC movements into a visual feedback form of energy flow, building a visual representation of the energy flows in the body [<xref ref-type="bibr" rid="ref60">60</xref>]. Furthermore, TCC is a traditional therapeutic practice that combines breathing exercises with physical training; only 1 system visualizes the respiratory status of TCC practitioners in the form of visual feedback [<xref ref-type="bibr" rid="ref39">39</xref>].</p><p>According to the literature review, only 9 TCC auxiliary training systems use multimodal interaction technology. For example, modern sensors, actuators, and VR or AR technologies can capture and reconstruct 4D motion behaviors, enabling seamless human-computer interaction [<xref ref-type="bibr" rid="ref44">44</xref>]. The TCC auxiliary training system can also be operated via voice interaction, motion interaction, and virtual keyboard technologies, providing visual feedback of scores to practitioners [<xref ref-type="bibr" rid="ref57">57</xref>]. Designers have also enhanced the interactive control experience by fixing a smartphone-based user interface to the user&#x2019;s forearm, ensuring seamless interaction and positioning the smartphone as a key interaction tool [<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref67">67</xref>]. Recently, a composite navigation module based on a unique environment has provided diverse interaction modes for TCC practitioners [<xref ref-type="bibr" rid="ref65">65</xref>]. These modes encompass voice commands, rainbow trailing effects, hand models, and particle tracks to enrich the TCC experience. This integrated approach harnesses diverse methodologies and technologies, marking significant progress in optimizing user interaction and feedback mechanisms in the TCC auxiliary training system. 
These advancements promise to enhance TCC practices and potentially foster their widespread adoption.</p></sec></sec><sec id="s3-5"><title>RQ3: Evaluation and Validation of the TCC Auxiliary Training System, With Evidence of Its Role in Health Promotion</title><p>This section summarizes the work conducted for evaluating or verifying each system according to the VR-core framework [<xref ref-type="bibr" rid="ref74">74</xref>], a framework commonly used for evaluating VR systems, which includes evaluating system acceptability, feasibility, tolerability, and clinical efficacy.</p><sec id="s3-5-1"><title>Acceptability, Feasibility, and Tolerability</title><p><xref ref-type="table" rid="table7">Table 7</xref> describes the preliminary evaluation of the TCC auxiliary training system to clarify its acceptability, feasibility, and tolerability.</p><table-wrap id="t7" position="float"><label>Table 7.</label><caption><p>Acceptability, feasibility, and tolerability of the Tai Chi Chuan (TCC) training system.</p></caption><table id="table7" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Domain and study</td><td align="left" valign="bottom">Participants</td><td align="left" valign="bottom">Result</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="3">Acceptability</td></tr><tr><td align="left" valign="bottom">&#x2003;Chen et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="bottom">Students (n=18)</td><td align="left" valign="bottom">ImmerTai in immersive environments is attractive to students and can enhance their learning experience significantly.</td></tr><tr><td align="left" valign="bottom">&#x2003;Yu and Xiong [<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="bottom">Older adults (n=41)</td><td align="left" valign="bottom">Participants thought this system was beneficial, positive, entertaining, with low privacy risk.</td></tr><tr><td align="left" valign="bottom">&#x2003;Kim et al 
[<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="bottom">Female participants (n=5)</td><td align="left" valign="bottom">VR<sup><xref ref-type="table-fn" rid="table7fn1">a</xref></sup> provided participants with novel experiences while practicing TCC<sup><xref ref-type="table-fn" rid="table7fn2">b</xref></sup>, which they found valuable and enjoyable.</td></tr><tr><td align="left" valign="bottom">&#x2003;Jan et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="bottom">Advanced TCC students (n=5)</td><td align="left" valign="bottom">The evaluation module clearly represented coach movements, the practice review UI<sup><xref ref-type="table-fn" rid="table7fn3">c</xref></sup> was easy to use, and the module correctly identified errors and correct poses.</td></tr><tr><td align="left" valign="bottom">&#x2003;Li et al [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="bottom">Students (n=246)</td><td align="left" valign="bottom">Students were more satisfied with &#x201C;content selection,&#x201D; &#x201C;running speed,&#x201D; and &#x201C;login speed.&#x201D;</td></tr><tr><td align="left" valign="bottom">&#x2003;Guimar&#x00E3;es et al [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td align="left" valign="bottom">Older adults (n=8)</td><td align="left" valign="bottom">The exergame was a fun way to perform exercises, which they felt to be important to motivate people of their age.</td></tr><tr><td align="left" valign="top" colspan="3">Feasibility</td></tr><tr><td align="left" valign="top">&#x2003;Chen et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">Students (n=12)</td><td align="left" valign="top">The algorithm for assessing the movement quality of TCC is highly correlated with expert ratings in TCC.</td></tr><tr><td align="left" valign="top">&#x2003;Chen et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">Students 
(n=18)</td><td align="left" valign="top">HMD<sup><xref ref-type="table-fn" rid="table7fn4">d</xref></sup> &#x003E; CAVE<sup><xref ref-type="table-fn" rid="table7fn5">e</xref></sup> &#x003E; PC<sup><xref ref-type="table-fn" rid="table7fn6">f</xref></sup> for NoR<sup><xref ref-type="table-fn" rid="table7fn7">g</xref></sup> (faster learning), CAVE &#x003E; PC &#x003E; HMD for MQS<sup><xref ref-type="table-fn" rid="table7fn8">h</xref></sup> (better quality), and CAVE &#x003E; PC &#x003E; HMD for QSPR<sup><xref ref-type="table-fn" rid="table7fn9">i</xref></sup> (higher learning efficiency).</td></tr><tr><td align="left" valign="top">&#x2003;Yu and Xiong [<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="top">Middle-aged and older participants (n=21)</td><td align="left" valign="top">Algorithm scores were comparable to the gold standard (experts&#x2019; ratings).</td></tr><tr><td align="left" valign="top">&#x2003;Iwaanaguchi et al [<xref ref-type="bibr" rid="ref39">39</xref>]</td><td align="left" valign="top">Beginner (n=8)</td><td align="left" valign="top">The 2 controls were complementary to each other.</td></tr><tr><td align="left" valign="top">&#x2003;Han et al [<xref ref-type="bibr" rid="ref42">42</xref>]</td><td align="left" valign="top">Participants (n=60)</td><td align="left" valign="top">Visualization of standard movement and a fixed augmented mirror.</td></tr><tr><td align="left" valign="top">&#x2003;Lin et al [<xref ref-type="bibr" rid="ref49">49</xref>]</td><td align="left" valign="top">Beginner (n=14)</td><td align="left" valign="top">The TCC training system with weight transfer guidance and feedback benefited TCC beginners.</td></tr><tr><td align="left" valign="top" rowspan="2">&#x2003;Zhu et al [<xref ref-type="bibr" rid="ref52">52</xref>]</td><td align="left" valign="top">Volunteers (n=30)</td><td align="left" valign="top">The smoke-enhanced display effectively allows users to recognize and distinguish specific human 
motions.</td></tr><tr><td align="left" valign="top">Volunteers (n=20)</td><td align="left" valign="top">Motion data presentation with smoke animation was beneficial and would help motivate long-term training.</td></tr><tr><td align="left" valign="top">&#x2003;Tharatipyakul et al [<xref ref-type="bibr" rid="ref56">56</xref>]</td><td align="left" valign="top">Participants (n=12)</td><td align="left" valign="top">Trainer video and video with skeleton allowed participants to be significantly more accurate.</td></tr><tr><td align="left" valign="top">&#x2003;Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Volunteers (n=20)</td><td align="left" valign="top">iTai-Chi system can significantly improve learning outcomes of TCC learners.</td></tr><tr><td align="left" valign="top">&#x2003;Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Volunteers (n=60)</td><td align="left" valign="top">iTai-Chi &#x003E; one-to-many tutorial &#x003E; video watching (more accurately).</td></tr><tr><td align="left" valign="top">&#x2003;Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Volunteers (n=30)</td><td align="left" valign="top">Performance of older TCC practitioners was enhanced significantly using iTai-Chi.</td></tr><tr><td align="left" valign="top">&#x2003;Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Volunteers (n=40)</td><td align="left" valign="top">iTai-Chi overcomes the lack of previous knowledge.</td></tr><tr><td align="left" valign="top">&#x2003;Kamel et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Volunteers (n=130)</td><td align="left" valign="top">Practicing TCC with the system was exciting and increased motivation to practice continually.</td></tr><tr><td align="left" valign="top">&#x2003;Chen [<xref ref-type="bibr" rid="ref38">38</xref>]</td><td align="left" 
valign="top">Participants (n=6)</td><td align="left" valign="top">Pressure-sensing shoes with visual feedback &#x003C; without visual feedback (participants spent less time).</td></tr><tr><td align="left" valign="top">&#x2003;Hsieh et al [<xref ref-type="bibr" rid="ref48">48</xref>]</td><td align="left" valign="top">Participants (n=60)</td><td align="left" valign="top">VR-based Tai Chi exercise provided protective effects for some cognitive and physical functions in older adults with cognitive impairment.</td></tr><tr><td align="left" valign="top">&#x2003;Tian et al [<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">Participants (n=36)</td><td align="left" valign="top">VR &#x003E; MR<sup><xref ref-type="table-fn" rid="table7fn10">j</xref></sup> &#x003E; TCG (better effectiveness) and MR &#x003E; TCG &#x003E; VR (better social experience).</td></tr><tr><td align="left" valign="top">&#x2003;Kim et al [<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="top">Female participants (n=5)</td><td align="left" valign="top">VR Tai Chi benefits included mindfulness/enjoyment/physical exercise.</td></tr><tr><td align="left" valign="top">&#x2003;Jan et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">Advanced TCC students (n=5)</td><td align="left" valign="top">Combining the camera and digital compasses &#x003E; using only the camera (higher utility).</td></tr><tr><td align="left" valign="top">&#x2003;Wang and Deng [<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">A user</td><td align="left" valign="top">Students could improve their learning based on feedback results.</td></tr><tr><td align="left" valign="top">&#x2003;Tian et al [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">Volunteers (n=9)</td><td align="left" valign="top">Significant improvements in the learners&#x2019; 
level of movement precision.</td></tr><tr><td align="left" valign="top">&#x2003;Wei et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td><td align="left" valign="top">Volunteers (n=4)</td><td align="left" valign="top">The system accurately assigned scores to users at different levels.</td></tr><tr><td align="left" valign="top">&#x2003;Lee et al [<xref ref-type="bibr" rid="ref37">37</xref>]</td><td align="left" valign="top">A user with movement disorders</td><td align="left" valign="top">Significantly increased participants&#x2019; motivation for physical rehabilitation, improving exercise performance.</td></tr><tr><td align="left" valign="top">&#x2003;Li et al [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">Students (n=246)</td><td align="left" valign="top">The system helped students learn TCC better (theory and skills).</td></tr><tr><td align="left" valign="top">&#x2003;Bian et al [<xref ref-type="bibr" rid="ref47">47</xref>]</td><td align="left" valign="top">Students (n=40)</td><td align="left" valign="top">Improved learning experience/performance.</td></tr><tr><td align="left" valign="top">&#x2003;Guimar&#x00E3;es et al [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td align="left" valign="top">Older adults (n=8)</td><td align="left" valign="top">Helped someone unfamiliar with exercises perform them correctly.</td></tr><tr><td align="left" valign="top">&#x2003;Han et al [<xref ref-type="bibr" rid="ref41">41</xref>]</td><td align="left" valign="top">Participants (n=8)</td><td align="left" valign="top">All participants were guided to move their arms to perform maneuvers well.</td></tr><tr><td align="left" valign="top" colspan="3">Tolerability</td></tr><tr><td align="left" valign="top">&#x2003;Tian et al [<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">Participants (n=36)</td><td align="left" valign="top">Despite the transitions between real and virtual worlds during the training, participants 
did not experience significant discomfort (VR/MR/TCG).</td></tr><tr><td align="left" valign="top">&#x2003;Kim et al [<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="top">Female participants (n=5)</td><td align="left" valign="top">In the virtual TCC program, participants had to bow their heads to view the location of their footsteps, which caused them to feel nausea.</td></tr></tbody></table><table-wrap-foot><fn id="table7fn1"><p><sup>a</sup>VR: virtual reality.</p></fn><fn id="table7fn2"><p><sup>b</sup>TCC: Tai Chi Chuan.</p></fn><fn id="table7fn3"><p><sup>c</sup>UI: user interface.</p></fn><fn id="table7fn4"><p><sup>d</sup>HMD: helmet-mounted display.</p></fn><fn id="table7fn5"><p><sup>e</sup>CAVE: cave automatic virtual environment.</p></fn><fn id="table7fn6"><p><sup>f</sup>PC: personal computer.</p></fn><fn id="table7fn7"><p><sup>g</sup>NoR: number of replays.</p></fn><fn id="table7fn8"><p><sup>h</sup>MQS: motion quality score.</p></fn><fn id="table7fn9"><p><sup>i</sup>QSPR: quality score per replay.</p></fn><fn id="table7fn10"><p><sup>j</sup>MR: mixed reality.</p></fn></table-wrap-foot></table-wrap><p>Among the 34 studies included in this review, 6 studies evaluating user acceptability of the developed system showed promising results. The users surveyed thought that the TCC auxiliary training system based on VR is valuable and positive and can effectively improve their practice quality. In addition, 21 studies have analyzed the feasibility of the TCC auxiliary training system to determine existing system problems as quickly as possible to prepare for follow-up clinical trials. Researchers evaluated the feasibility of the TCC motion quality assessment algorithm and system in different VR environments, yielding favorable results [<xref ref-type="bibr" rid="ref32">32</xref>]. 
In addition, the validity of the system&#x2019;s assessment algorithm was verified, showing a high correlation with expert ratings [<xref ref-type="bibr" rid="ref50">50</xref>]. Other researchers evaluated the TCC auxiliary training system under 2 control modes, demonstrating its feasibility and interoperability [<xref ref-type="bibr" rid="ref39">39</xref>]. Feedback collected through interviews identified existing issues within the current system [<xref ref-type="bibr" rid="ref42">42</xref>]. Furthermore, the TCC auxiliary training system with center-of-mass guidance and feedback functions is more beneficial for beginners. The smoke-enhanced display helps users effectively identify and differentiate specific body movements, facilitating TCC practice [<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref52">52</xref>]. However, other researchers found that displaying virtual videos is also a feasible approach [<xref ref-type="bibr" rid="ref56">56</xref>]. Finally, 2 studies specifically analyzed the tolerability of the TCC auxiliary training system. One study reported that participants did not experience significant discomfort when switching between real and virtual environments [<xref ref-type="bibr" rid="ref69">69</xref>]. In contrast, another study observed that participants in virtual Tai Chi training experienced nausea due to the need to lower their heads to check foot positioning [<xref ref-type="bibr" rid="ref64">64</xref>].</p></sec><sec id="s3-5-2"><title>Clinical Efficacy</title><p>Regarding the clinical efficacy evaluation of the TCC auxiliary training system, only 2 studies align with the international working group&#x2019;s recommendations [<xref ref-type="bibr" rid="ref74">74</xref>]. Of these, 1 study randomly divided older adults into AR TCC auxiliary training and traditional TCC training groups. After 8 weeks of 30-minute training sessions, 3 times a week, the 3 balance function test scores improved in both groups. 
AR-assisted training with selected TCC movements, which are designed based on objective measurements of the practitioner&#x2019;s capability, improved balance control and lower limb muscle strength at least as effectively as the complete sequence of traditional TCC exercises [<xref ref-type="bibr" rid="ref55">55</xref>]. To explore the cognitive and physical effects of a VR-based Tai Chi (VRTC) exercise program on older adults with cognitive impairment, another study divided 60 older adults with cognitive impairment into clusters assigned to either the VRTC or the control group, and the intervention was conducted twice weekly for 6 months. The results suggested that the VRTC exercise program significantly protected abstract thinking and judgment, aerobic endurance, lower extremity endurance, balance, and gait speed, confirming the clinical efficacy of VRTC, as provided in <xref ref-type="table" rid="table8">Table 8</xref>.</p><table-wrap id="t8" position="float"><label>Table 8.</label><caption><p>Clinical efficacy of the Tai Chi Chuan (TCC) training system.</p></caption><table id="table8" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Study</td><td align="left" valign="bottom">Chen et al [<xref ref-type="bibr" rid="ref55">55</xref>]</td><td align="left" valign="bottom">Hsieh et al [<xref ref-type="bibr" rid="ref48">48</xref>]</td></tr></thead><tbody><tr><td align="left" valign="top">Design</td><td align="left" valign="top">RCT<sup><xref ref-type="table-fn" rid="table8fn1">a</xref></sup></td><td align="left" valign="top">Quasi-randomized design study</td></tr><tr><td align="left" valign="top">Participants</td><td align="left" valign="top">Adults aged &#x2265;65 years without any debilitating diseases (n=28)</td><td align="left" valign="top">Adults aged &#x2265;65 years with cognitive impairment (n=60)</td></tr><tr><td align="left" valign="top" rowspan="2">Comparisons (number of participants)</td><td align="left" valign="top">sTC<sup><xref 
ref-type="table-fn" rid="table8fn2">b</xref></sup>: using AR TCC<sup><xref ref-type="table-fn" rid="table8fn3">c</xref></sup> training system to practice TCC (n=14)</td><td align="left" valign="top">VRTC: using VR TCC training system to practice TCC (n=14)</td></tr><tr><td align="left" valign="top">tTC<sup><xref ref-type="table-fn" rid="table8fn4">d</xref></sup>: TCC masters guide the practice of TCC (n=14)</td><td align="left" valign="top">Control: TCC masters guide the TCC practice (n=14)</td></tr><tr><td align="left" valign="top">Exercise dose</td><td align="left" valign="top">30 minutes, 3 times per week for 8 weeks</td><td align="left" valign="top">VRTC<sup><xref ref-type="table-fn" rid="table8fn5">e</xref></sup> group: 60-minute group session, twice weekly for 24 weeks Control group: no exercise or specific behavioral management training</td></tr><tr><td align="left" valign="top">Outcomes measurement</td><td align="left" valign="top">sTC: BBS<sup><xref ref-type="table-fn" rid="table8fn6">f</xref></sup>&#x2191;<sup><xref ref-type="table-fn" rid="table8fn7">g</xref></sup>, TUG<sup><xref ref-type="table-fn" rid="table8fn8">h</xref></sup>&#x2191;, FRT<sup><xref ref-type="table-fn" rid="table8fn9">i</xref></sup>&#x2191;, and muscle strength&#x2191;</td><td align="left" valign="top">VRTC group: 6MWT<sup><xref ref-type="table-fn" rid="table8fn10">j</xref></sup>&#x2193;<sup><xref ref-type="table-fn" rid="table8fn11">k</xref></sup>, 30-s STS<sup><xref ref-type="table-fn" rid="table8fn12">l</xref></sup>&#x2193;, FRT<break/>5-m gait speed&#x2192;<sup><xref ref-type="table-fn" rid="table8fn13">m</xref></sup>, and ABSTR<sup><xref ref-type="table-fn" rid="table8fn14">n</xref></sup>&#x2191;</td></tr><tr><td align="left" valign="top">Results</td><td align="left" valign="top">Equivalent</td><td align="left" valign="top">VRTC exercise yielded some cognitive and physical benefits</td></tr><tr><td align="left" valign="top">Trial registration</td><td align="left" 
valign="top">#1000087</td><td align="left" valign="top">103-1487B</td></tr></tbody></table><table-wrap-foot><fn id="table8fn1"><p><sup>a</sup>RCT: randomized controlled trial.</p></fn><fn id="table8fn2"><p><sup>b</sup>sTC: selected Tai Chi.</p></fn><fn id="table8fn3"><p><sup>c</sup>TCC: Tai Chi Chuan.</p></fn><fn id="table8fn4"><p><sup>d</sup>tTC: traditional Tai Chi.</p></fn><fn id="table8fn5"><p><sup>e</sup>VRTC: virtual reality&#x2013;based Tai Chi.</p></fn><fn id="table8fn6"><p><sup>f</sup>BBS: Berg Balance Scale.</p></fn><fn id="table8fn7"><p><sup>g</sup>&#x2191;: increased.</p></fn><fn id="table8fn8"><p><sup>h</sup>TUG: timed up and go test.</p></fn><fn id="table8fn9"><p><sup>i</sup>FRT: functional reach test.</p></fn><fn id="table8fn10"><p><sup>j</sup>6MWT: 6-minute walk test.</p></fn><fn id="table8fn11"><p><sup>k</sup>&#x2193;: decrease.</p></fn><fn id="table8fn12"><p><sup>l</sup>30-s STS: 30-second sit-to-stand test.</p></fn><fn id="table8fn13"><p><sup>m</sup>&#x2192;: no change.</p></fn><fn id="table8fn14"><p><sup>n</sup>ABSTR: abstract thinking and judgment.</p></fn></table-wrap-foot></table-wrap></sec></sec><sec id="s3-6"><title>RQ4: Future Directions for the Development of the TCC Auxiliary Training System</title><p>To enhance the innovation and foresight of the TCC auxiliary training system, we further explored its future development directions and proposed several potential technology integration approaches. 
These are aimed at addressing current challenges in personalized training, real-time feedback, and data analysis, thereby promoting the precision and intelligence of TCC training outcomes.</p><sec id="s3-6-1"><title>Integration of Artificial Intelligence (AI) With the TCC Auxiliary Training System</title><p>The rapid development of artificial intelligence (AI) has brought revolutionary changes to sports training and rehabilitation, particularly in areas such as motion recognition, posture optimization, and personalized training [<xref ref-type="bibr" rid="ref75">75</xref>]. Among these, generative feedback and deep learning&#x2013;based skeletal tracking, as key AI applications, show great potential in TCC-assisted training. The TCC training process requires precise posture control and dynamic coordination from practitioners. In this context, AI integration can provide more accurate feedback and guidance to the TCC auxiliary training system, improving both training effectiveness and efficiency.</p><p>Generative feedback is an AI-based technology that generates dynamic feedback by analyzing real-time motion data to provide targeted training suggestions [<xref ref-type="bibr" rid="ref76">76</xref>]. In Tai Chi training, generative feedback analyzes the practitioner&#x2019;s posture and movements in real time to offer personalized improvement suggestions. Compared to traditional manual guidance, generative feedback offers real-time, personalized, and repeatable characteristics, enabling practitioners to make subtle adjustments in each movement cycle, effectively reducing motion deviations and improving training quality. Deep learning&#x2013;based skeletal tracking, using computer vision and deep learning algorithms, precisely tracks the movement of the human skeletal structure [<xref ref-type="bibr" rid="ref77">77</xref>]. 
In Tai Chi training, deep learning&#x2013;based skeletal tracking captures the practitioner&#x2019;s full-body posture and joint movements in real time, generating accurate skeletal models. These skeletal models not only reflect an individual&#x2019;s movement trajectories and posture changes but also reveal potential issues that may arise during the movements.</p></sec><sec id="s3-6-2"><title>Integration of TCC Auxiliary Training Systems and Biofeedback Technology</title><p>Biofeedback technology, which involves real-time monitoring of physiological data and providing feedback, can present and regulate the practitioner&#x2019;s physiological state to achieve training objectives [<xref ref-type="bibr" rid="ref78">78</xref>-<xref ref-type="bibr" rid="ref80">80</xref>]. In the application of TCC auxiliary training systems, biofeedback technology, particularly heart rate variability (HRV) and electroencephalography, provides significant potential for enhancing training outcomes. HRV, a physiological indicator of the regularity of heart rate fluctuations, can reflect the practitioner&#x2019;s bodily responses in real time, particularly when facing varying levels of training intensity and load [<xref ref-type="bibr" rid="ref80">80</xref>]. By monitoring HRV, training intensity and content can be dynamically adjusted to avoid the physiological burden caused by overtraining, thereby achieving a personalized training program. Changes in electroencephalography signals reflect psychological characteristics such as attention, relaxation state, and emotional fluctuations [<xref ref-type="bibr" rid="ref81">81</xref>]. For TCC auxiliary training systems, real-time electroencephalography feedback serves as a crucial tool for users to adjust their mental state and optimize training outcomes. 
TCC, as a traditional exercise form that emphasizes mind-body coordination, particularly focuses on the practitioner&#x2019;s internal concentration, breath control, and body movement coordination. With the integration of electroencephalography, TCC auxiliary training systems can detect when the practitioner&#x2019;s brain is in an overly excited or anxious state and provide appropriate feedback to guide them in relaxation exercises such as deep breathing or meditation, helping them achieve an optimal mental state and thus enhancing training effectiveness [<xref ref-type="bibr" rid="ref82">82</xref>].</p></sec><sec id="s3-6-3"><title>Integration of the TCC Auxiliary Training System With Digital Twin Technology</title><p>Digital twin technology has rapidly advanced in recent years and is now widely applied across various fields, including smart manufacturing, health care, and sports training [<xref ref-type="bibr" rid="ref83">83</xref>,<xref ref-type="bibr" rid="ref84">84</xref>]. In the field of sports training, the application of digital twins enables precise simulation and prediction of an individual&#x2019;s movement state and performance through virtual models. The integration of the digital twin with the TCC auxiliary training system is a novel design approach, promising enhanced precision and personalization in training while providing effective feedback on posture control and movement efficiency during exercise.</p><p>Posture twin uses detailed modeling of human movement to track actions in real time and provide optimization suggestions based on biomechanical principles. A core goal of TCC training is to improve balance and coordination through slow, fluid movements, which is particularly important for older adults. Posture twin not only tracks the balance status of Tai Chi practitioners in real time during each movement cycle but also combines gait analysis to assess gait symmetry, stability, and movement fluidity. 
In the future, with the development of digital twin technology, its application in TCC training will expand, opening new research directions for interdisciplinary integration in fields such as kinesiology and rehabilitation medicine.</p><p>In summary, the future development of the TCC auxiliary training system, integrated with advanced technologies such as AI, biofeedback, and digital twin, demonstrates great potential. Through the innovation and integration of these technologies, the system not only provides more personalized, real-time, and precise training feedback but also optimizes the physiological and psychological state of the trainee, thereby improving overall training effectiveness and efficiency. Future research can further deepen the application of these technologies, explore more interdisciplinary integration paths, and contribute to advancing the TCC auxiliary training system.</p></sec></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Findings</title><p>The results of this review indicate that most individuals can effectively engage in TCC training using assistive systems, which offer significant benefits for both physical and mental health. Existing studies show that the development tools for TCC auxiliary training systems are diverse, enabling the creation of training environments through various hardware and software combinations. The design of interaction modes and feedback systems effectively enhances participants&#x2019; engagement, learning interest, and training outcomes, without causing significant adverse effects. However, clinical validation of TCC auxiliary training systems remains insufficient. This necessitates future research to conduct longitudinal studies with standardized reporting to strengthen clinical validation. Finally, this study offers forward-looking insights by providing practical, design-oriented recommendations, such as integrating emerging technologies. 
These recommendations aim to guide future development, an area that has not been sufficiently explored in previous practice. Therefore, this review not only synthesizes existing evidence but also provides a strategic roadmap for designing more scalable and feasible TCC auxiliary training systems.</p></sec><sec id="s4-2"><title>Design of the TCC Auxiliary Training System</title><p>Immersive VR is principally used in developing the VR environment of existing TCC auxiliary training systems. The TCC assistant training system based on immersive VR has significant advantages in improving the learning speed of TCC practitioners, among which the HMD environment with the best VR experience has the fastest learning speed. In addition, compared with personal computers and HMD, the CAVE environment is more helpful for improving learning quality and increasing action learning efficiency [<xref ref-type="bibr" rid="ref32">32</xref>]. This may be because the VR environment provided by HMD and CAVE becomes the visual reality of the practitioner without any hint of the physical environment. By using sensory information related to exercise, practitioners improve their attention to learning and motivation and, subsequently, the learning efficiency of TCC. The virtual scene design of mixed reality is also used in the current system design. In mixed reality, TCC practitioners can coexist with virtual coaches and interact in a mixed environment. After wearing a helmet, the practitioner can see many virtual coaches around them, and the TCC movements of the coach from different angles can be easily observed. Simultaneously, movements can be corrected over time through the augmented mirror [<xref ref-type="bibr" rid="ref42">42</xref>]. 
In contrast, the AR design in the practice system is closer to the real-world scene, introducing computer-generated elements, including the use of common devices such as smartphones or tablets as viewing media, thereby enhancing the sensory experience of the natural environment. Therefore, existing research [<xref ref-type="bibr" rid="ref55">55</xref>] shows that using an AR-based TCC auxiliary training system for TCC training can effectively improve the balance control of older adults and increase the muscle strength of the lower extremities.</p><p>Microsoft HoloLens is the most widely used VR tool in implementing VR environments. It accurately tracks TCC practitioners&#x2019; head movements in real time and offers immediate interactive experiences [<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref53">53</xref>]. Its enhanced features and improved health services also cater to clinical rehabilitation and medical environments [<xref ref-type="bibr" rid="ref85">85</xref>]. Second, the VR tool Oculus Rift has the characteristics of high resolution, a large field of view, low weight, easy setup, and easy access to good driver support [<xref ref-type="bibr" rid="ref44">44</xref>]. However, it is essential to note that if the training system is to be applied to older adults or people with dysfunction, then the weight of HMD must be considered. In addition, although the use of HMD for TCC learning has achieved good learning results, we must consider that helmet displays are limited to a certain extent in the field of vision.</p><p>Kinects are widely adopted for motion capture due to their low cost, portability, and markerless capabilities, enabling data collection in diverse environments. 
They are particularly prevalent in VR training systems and are recognized as safe, effective, and feasible tools for rehabilitation in geriatric, neurological, and sports settings [<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref86">86</xref>,<xref ref-type="bibr" rid="ref87">87</xref>]. However, the disadvantage of Kinect-based systems is lower capture accuracy. There are 2 solutions in the current system design to solve this problem. One is to use the high-precision Vicon system as the motion acquisition equipment for the TCC coach, while the practitioner uses Kinect to capture the motion. This system design ensures the professionalism and accuracy of the coach action database and increases system portability, which is no longer limited by time and place, allowing it to be used more widely. Another method is to use a multimodal input combination. For example, based on Kinect, the plantar pressure sensor works together to obtain motion data from TCC practitioners and instantiates virtual images from practitioners as real-time input, effectively increasing the interactivity of the virtual environment [<xref ref-type="bibr" rid="ref44">44</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref58">58</xref>]. Notably, the validity of the kinematic data recorded by Kinect should be improved. The book &#x201C;Virtual Reality for Physical and Motor Rehabilitation&#x201D; recommends camera placement to obtain the best motion tracking for upper limb applications. The Kinect camera should be located within 30&#x00D7;30 cm<sup>2</sup>, at a distance of 1.45 to 1.75 meters from the user, and 0.15 meters to either the left or right [<xref ref-type="bibr" rid="ref88">88</xref>]. 
This study can provide a reference for system development and design in the future.</p><p>Another multimodal interaction uses wearable devices, Noitom and HMD, as action and voice inputs, resulting in natural voice interaction and gesture interaction, which creates a stronger sense of immersive experience and dramatically increases the practitioner&#x2019;s sense of participation. Simultaneously, from the perspective of neural rehabilitation, this visual-auditory interaction can effectively improve the cognitive ability of users, especially attention and working memory [<xref ref-type="bibr" rid="ref89">89</xref>]. With respect to current technology, achieving a complete multimodal interactive VR environment is not easy. The ultimate goal of human-computer interaction is to achieve seamless interaction as if the TCC coach and the practitioner were interacting face-to-face. Therefore, based on multimodal means including gestures, voice, and touch, we should also consider vision-based facial expression capture, mental state assessment, and wearable physiological index detection.</p><p>When the TCC auxiliary training system converts the TCC action of the practitioner into signal data that can be calculated and analyzed through multiple input devices, the algorithm is then used for segmenting, feature extraction, and classification of signal data. The most frequently mentioned classification algorithm is the DTW algorithm based on template matching. In the TCC auxiliary training system, the algorithm is mainly used for similarity matching. With the wide application of the algorithm, several researchers have innovated and improved it. A study used 8 bone vectors from human bones and body directions as input features and proposed a concise version of the DTW algorithm, which can further convert DTW distances into meaningful performance scores without requiring expert training data and experience [<xref ref-type="bibr" rid="ref50">50</xref>]. 
The algorithm was effective and consistent compared to the expert score. However, the background of the 3 scoring experts was not introduced, so the results require further validation.</p><p>In recent years, machine learning has emerged as one of the forefront technologies in AI and is undergoing rapid development. Neural network models relevant to machine learning have found extensive applications across various domains, including video analysis and digital image processing. The TCC auxiliary training system has recently started using neural network models for human posture estimation. Some studies transformed the human posture estimation problem into a neural network&#x2013;based skeletal joint regression problem [<xref ref-type="bibr" rid="ref61">61</xref>,<xref ref-type="bibr" rid="ref68">68</xref>]. Compared to conventional motion capture systems, another study proposed a TCC auxiliary training system based on pose estimation using convolutional neural networks, which demonstrated enhanced accuracy in estimating the postures of TCC practitioners [<xref ref-type="bibr" rid="ref54">54</xref>].</p><p>The TCC auxiliary training system helps practitioners complete the required movements through continuous human-computer interactions. Therefore, after applying the algorithm to evaluate the collected motion data, it is necessary to provide feedback on motion quality to the practitioner in real time to improve the degree of participation. According to the literature, real-time visual feedback is the most widely used feedback strategy. 
Through real-time visual feedback, TCC practitioners can see themselves on the same screen as the instructor in the video [<xref ref-type="bibr" rid="ref56">56</xref>], as well as the center of gravity distribution [<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref51">51</xref>] and breathing status [<xref ref-type="bibr" rid="ref39">39</xref>], which helps practitioners quickly improve their physical movements, enhance their interest in learning, and improve learning efficiency. In addition, some studies have designed 3 exaggerated real-time virtual visual cues to display the center of mass distribution. This exaggerated design enables practitioners to more easily observe the center of mass distribution of the virtual coach, providing useful visual feedback, particularly suited for beginners [<xref ref-type="bibr" rid="ref51">51</xref>]. Simultaneously, exaggerated virtual sports design is more entertaining, increasing practitioner engagement in the practice process and improving virtual interaction performance. Introducing virtual smoke into the TCC training system to enhance the effect of motion display can encourage practitioners to practice longer, support sports memory, and aid memory retention [<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref90">90</xref>].</p><p>With the advancement of technology, multimodal interaction has provided TCC practitioners with a more enriched and personalized learning experience. The feedback-based spatial path teaching method helps learners achieve better training outcomes through feedback and guidance. In addition, to emphasize training personalization, effects such as voice prompts, track settings, and arrow indicators are introduced for learners to choose, which improves training effectiveness and user participation [<xref ref-type="bibr" rid="ref65">65</xref>]. 
To overcome the interactive limitations of traditional teaching methods and enable TCC learners to fully experience the joy of motion learning through natural interactions with the system, multimodal interaction via multiplatform browsers has been used to seamlessly and swiftly interact with the system, controlling the scenes and characters in the web and achieving effective learning objectives [<xref ref-type="bibr" rid="ref67">67</xref>].</p><p>Notably, some researchers [<xref ref-type="bibr" rid="ref91">91</xref>] believe that real-time feedback may be more effective for beginners, while terminal feedback is more suitable for skilled users. Therefore, combining real-time and terminal feedback can be considered in future system design, stimulating practitioners&#x2019; efforts, motivation, and persistence to some extent and potentially exceeding the goals achieved in current practice.</p></sec><sec id="s4-3"><title>TCC Auxiliary Training System Evaluation</title><p>In our opinion, the TCC auxiliary training system based on VR serves both as an auxiliary training system and a medical tool for rehabilitation exercises. However, the use of this system in clinical settings must be clinically validated. Of the studies we reviewed, only 2 studies clinically validated the designed system. Compared to traditional coach-guided training groups, the AR-based TCC auxiliary training system is effective for rehabilitation in older adults, and its clinical efficacy has been validated [<xref ref-type="bibr" rid="ref55">55</xref>]. In addition, combining the VR-based TCC auxiliary training system can offer VR and TCC exercise programs for older adults with cognitive impairments. 
Compared to the control group maintaining regular physical activity, this system preserves cognitive and physical functions in older adults with cognitive impairments [<xref ref-type="bibr" rid="ref48">48</xref>].</p><p>Although the results mentioned in the above &#x201C;Clinical Efficacy&#x201D; section are promising, these systems still lack robust research designs, rigorous measurement methods, and standardized, appropriate clinical outcome measures. This not only affects the external validity of the intervention effects but also limits the implementation of large-scale, multicenter randomized controlled trials to some extent. Particularly, in the context of rapid system iterations, the stability and generalizability of these systems in long-term clinical pathways remain unclear.</p><p>To advance the field, we recommend that future studies: first, adopt multicenter, randomized controlled trials with sufficient sample sizes, focusing on populations with high clinical value, such as older adults at high risk of falls, patients with Parkinson disease, and those with mild cognitive impairment. The control group may receive traditional rehabilitation training, health education, or VR experiences with placebo effects, ensuring clear attribution of the intervention effects. The intervention period should be at least 8 weeks, with a follow-up period of 3 to 6 months, and measurements taken at multiple time points to assess the maintenance effects of the intervention. Second, future studies should clearly define prioritized outcome measures in the research protocol and align them with the 4 dimensions of the VR core framework: acceptability, feasibility, tolerability, and clinical efficacy. For example, clinical trials targeting older adults may include primary outcomes such as balance or cognitive function, and secondary outcomes such as quality of life, lower limb strength, fall incidence, and the 4 dimensions of the VR core framework. 
Finally, the TCC system should be integrated into clinical rehabilitation pathways or telemedicine frameworks. It could be implemented in rehabilitation departments of tertiary hospitals, where the TCC system can serve as an adjunct module for physical therapy or occupational therapy in inpatient and outpatient rehabilitation processes. In community rehabilitation centers, TCC could be promoted as a group or individualized intervention to enhance its accessibility. In addition, remote medical platforms could integrate head-mounted displays, motion sensors, and cloud-based monitoring to enable real-time guidance and data feedback in home settings, allowing physicians and rehabilitation therapists to remotely assess progress and adjust training programs [<xref ref-type="bibr" rid="ref92">92</xref>].</p><p>Despite the VR core framework recommending a comprehensive evaluation of VR-based interventions across 4 key domains (acceptability, feasibility, tolerance, and clinical efficacy), most of the included studies in this review did not report all these dimensions. This omission limits the ability to fully assess the value and applicability of the TCC auxiliary training system. The lack of acceptability and feasibility data hinders understanding of whether the intervention can be practically adopted in various settings, while the absence of tolerance data impedes a robust assessment of safety and user comfort. Without this information, even positive clinical efficacy results may overestimate the practical applicability of these systems. Potential reasons for this reporting gap include the early development stages of many systems, heterogeneous study designs, and a focus in most studies on efficacy outcomes rather than comprehensive evaluations. 
Future research should incorporate all 4 domains of the VR core framework into study protocols and report using standardized measures, leading to more balanced and generalizable conclusions regarding the efficacy and implementation potential of VR-assisted TCC training systems, with higher comparability across studies.</p></sec><sec id="s4-4"><title>Clinical Implication of the TCC Auxiliary Training System</title><p>In recent years, there have been numerous new findings regarding the clinical efficacy of TCC, with traditional TCC practices proving highly effective in reducing stress and promoting meditation or relaxation. The research findings of Kim et al [<xref ref-type="bibr" rid="ref64">64</xref>] also demonstrated that virtual TCC practices can induce relaxation, reduce stress, and promote mental tranquility. This suggests that the virtual TCC auxiliary training system can serve as a mindfulness and meditation tool for promoting emotional well-being in older adults. In addition, TCC auxiliary training systems enable individuals to engage effortlessly and authentically in TCC practices, significantly reducing physical and psychological barriers. TCC auxiliary training systems can create both standing and sitting TCC practices [<xref ref-type="bibr" rid="ref44">44</xref>,<xref ref-type="bibr" rid="ref93">93</xref>]. Sitting TCC can reduce the pressure on the joints to the greatest extent and eliminate pain or fear caused by exercise. Therefore, sitting during TCC practice can make the practitioner&#x2019;s mind more relaxed and peaceful. Moreover, the TCC auxiliary training system offers unique features such as engaging virtual environments, soothing background music, and a scoring system. These features are attractive and motivating for participants, facilitating the execution of movements and meditation techniques.</p><p>The TCC auxiliary training systems are suitable for people of all ages, including those with chronic diseases or dysfunction. 
Therefore, these systems are effective for home and community rehabilitation and can maintain the continuity of neural rehabilitation by overcoming social obstacles such as distance and cost, allowing practitioners to adjust their training schedule and exercise intensity. For practitioners, this is an entertaining and healthy way of life that positively impacts the mental health, physical health, self-esteem, and attention of older adults.</p><p>Notably, only 2 of the included studies explicitly assessed and reported clinical efficacy, highlighting a critical gap in the current evidence base. Clinical efficacy is the most direct indicator of the health benefits of the TCC auxiliary training system. Although other metrics, such as acceptability or feasibility, may be favorable, the lack of clinical efficacy data limits the ability to determine the real-world therapeutic value of these interventions. This scarcity may be attributed to the early development stage of most systems, small sample sizes, short intervention durations, and a primary focus on technical validation rather than patient-centered outcomes. Therefore, future research should prioritize well-powered, well-controlled trials that include validated clinical outcome measures as primary endpoints, while also conducting comprehensive assessments of acceptability, feasibility, and tolerability to provide an overall evaluation of intervention impact.</p></sec><sec id="s4-5"><title>Limitations</title><p>This review has several limitations. First, our search strategy included studies broadly related to the TCC auxiliary training system but did not systematically target specific diseases or populations. As a result, this review may not fully capture TCC auxiliary training systems tailored to specific diseases (eg, depression, anxiety, bipolar disorder, or post-traumatic stress disorder) or populations (eg, middle-aged individuals, older adults, or beginners). 
Future research should adopt more refined approaches to evaluate TCC auxiliary training systems targeting different diseases or populations, providing a deeper understanding of their effectiveness for these groups.</p><p>Second, the lack of standardized clinical efficacy assessment criteria is a major challenge in this field. The assessment tools and clinical indicators used in different studies are often inconsistent, leading to poor comparability and consistency of the findings. Clinical efficacy is typically assessed based on short-term effects, with a lack of long-term follow-up studies, making the evaluation of the TCC auxiliary training system&#x2019;s efficacy and lasting impact incomplete. This limits the credibility of broader adoption and clinical application. A standardized evaluation system should be established for TCC auxiliary training systems to improve the reliability of assessing their therapeutic effects.</p><p>Third, this review found that most of the included studies were conducted in China, likely due to TCC&#x2019;s origin and widespread practice in the country. This geographic concentration may limit the generalizability of our findings to populations in other cultural and health care contexts. Factors such as familiarity with TCC culture, local rehabilitation practices, and available technological infrastructure may influence user engagement and intervention outcomes. In addition, although our search was limited to English-language databases, the dominance of studies from a single country may introduce geographical bias. 
Future research should include multicenter trials conducted in different geographic regions to validate the applicability and effectiveness of TCC-based training systems in diverse cultural and health care settings.</p><p>Finally, while this review addresses ethical issues related to TCC auxiliary training systems to a lesser extent, empirical research examining the real-world ethical, social, and clinical impacts remains limited. The ethical impacts and accessibility challenges associated with deploying TCC auxiliary training systems are critical and cannot be overlooked. On one hand, while the application of body-tracking technology can provide accurate motion data and personalized feedback, it also raises significant concerns about privacy protection. TCC training systems use high-precision sensors and cameras to monitor practitioners&#x2019; movements, posture, and physiological states in real time, thereby increasing the risk of personal data breaches, particularly during data storage, transmission, and analysis. This issue is especially concerning for older adults or vulnerable populations, who may not fully recognize the potential risks of data collection or how their data will be used, thereby complicating privacy protection. Ensuring data anonymization, legal and compliant use, and informed user consent is therefore crucial. Accordingly, the system should establish a robust data privacy protection mechanism to ensure that users can make informed and autonomous choices.</p><p>On the other hand, another accessibility challenge for TCC auxiliary training systems is their applicability to frail older adults. Frail older adults often face multiple physical, cognitive, and psychological challenges, such as poor motor coordination, slow response times, and difficulties understanding and operating complex technologies. 
While TCC, as a mild form of exercise, can improve the physical health of older adults, complex system operations and interfaces may impose a burden on some frail individuals. Furthermore, the precision of the personalized training system and the intensity of real-time feedback may need further adaptation and adjustment based on the physical conditions of older adults. To ensure that the TCC auxiliary training system benefits all groups, system design must account for the diverse needs of users with varying physical conditions, provide an intuitive interface, and reduce technical barriers through personalized features. This will not only help older users overcome technical adaptation challenges but also ensure the system&#x2019;s sustainability and wide applicability in real-world use. Despite these limitations, this review provides a comprehensive overview of the application of TCC auxiliary training systems in the current context, identifies key research gaps, and offers feasible suggestions for future studies.</p></sec><sec id="s4-6"><title>Conclusion</title><p>This review provides a comprehensive evaluation of the design, application, research trends, and clinical effectiveness of TCC auxiliary training systems and offers recommendations for future development. We followed the PRISMA-ScR guidelines and analyzed 34 peer-reviewed studies published after 2014. This review addresses 4 key questions (RQs): development tools (RQ1), system design (RQ2), evaluation or validation (RQ3), and future development (RQ4). It emphasizes current development trends of TCC auxiliary training systems and outlines the design framework required for future advancements.</p><p>For RQ1, our findings indicate that development tools for TCC auxiliary training systems are diverse in both hardware and software. In terms of hardware, motion capture devices, VR tools, and AR technologies are widely used. 
In terms of software, Unity3D plays a leading role in system development, aiding in the creation of high-quality virtual training environments. However, further attention is needed to integrate various types of motion capture technologies and develop more immersive systems to enhance training effectiveness.</p><p>Regarding RQ2, we found that the design of TCC auxiliary training systems addresses certain demographic needs, with the 24-form Tai Chi style as the primary reference, incorporating various interaction modes and algorithms. However, further optimization is needed in integrating different interaction modes and considering accessibility to enhance training outcomes and user engagement.</p><p>For RQ3, the findings suggest that TCC auxiliary training systems show promising results in acceptability, feasibility, tolerability, and clinical efficacy. However, further optimization of system design is needed to address existing usability barriers and conduct large-scale longitudinal studies to strengthen clinical validation. 
Finally, for RQ4, future research could further optimize the integration of technologies and promote interdisciplinary collaboration to enhance the intelligence and precision of TCC auxiliary training systems.</p></sec></sec></body><back><ack><p>We thank everyone who offered help and all our collaborators for their active cooperation.</p></ack><notes><sec><title>Funding</title><p>This work was supported by the Key Research and Development Project funded by the Ministry of Science and Technology of the People&#x2019;s Republic of China (grant no 2023YFC3503701) and the Projects of Fujian University of Traditional Chinese Medicine (grant no X2023002).</p></sec><sec><title>Data Availability</title><p>All data generated or analyzed during this study are included in this published study and its supplementary information files.</p></sec></notes><fn-group><fn fn-type="con"><p>Hong Liu was involved in conceptualization, data curation, methodology, formal analysis, writing the original draft, supervision, and project administration. Huibiao Li was involved in data curation, formal analysis, methodology, validation, visualization, and writing the original draft. HH was involved in formal analysis, investigation, and writing the original draft. JH was involved in conceptualization, supervision, and writing the original draft. YZ was involved in conceptualization, methodology, and reviewing and editing the draft. 
LC was involved in conceptualization, resources, supervision, project administration, and reviewing and editing the draft.</p></fn><fn fn-type="conflict"><p>None declared.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">AI</term><def><p>artificial intelligence</p></def></def-item><def-item><term id="abb2">AR</term><def><p>augmented reality</p></def></def-item><def-item><term id="abb3">CAVE</term><def><p>cave automatic virtual environment</p></def></def-item><def-item><term id="abb4">DTW</term><def><p>dynamic time warping</p></def></def-item><def-item><term id="abb5">HMD</term><def><p>head-mounted display</p></def></def-item><def-item><term id="abb6">HRV</term><def><p>heart rate variability</p></def></def-item><def-item><term id="abb7">PRISMA</term><def><p>Preferred Reporting Items for Systematic Reviews and Meta-Analyses</p></def></def-item><def-item><term id="abb8">PRISMA-ScR</term><def><p>Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews</p></def></def-item><def-item><term id="abb9">RQ</term><def><p>research question</p></def></def-item><def-item><term id="abb10">TCC</term><def><p>Tai Chi Chuan</p></def></def-item><def-item><term id="abb11">VR</term><def><p>virtual reality</p></def></def-item><def-item><term id="abb12">VRTC</term><def><p>virtual reality&#x2013;based Tai Chi</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kerr</surname><given-names>NR</given-names> </name><name name-style="western"><surname>Booth</surname><given-names>FW</given-names> </name></person-group><article-title>Contributions of physical inactivity and sedentary behavior to metabolic and endocrine diseases</article-title><source>Trends Endocrinol 
Metab</source><year>2022</year><month>12</month><volume>33</volume><issue>12</issue><fpage>817</fpage><lpage>827</lpage><pub-id pub-id-type="doi">10.1016/j.tem.2022.09.002</pub-id><pub-id pub-id-type="medline">36283907</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Booth</surname><given-names>FW</given-names> </name><name name-style="western"><surname>Roberts</surname><given-names>CK</given-names> </name><name name-style="western"><surname>Thyfault</surname><given-names>JP</given-names> </name><name name-style="western"><surname>Ruegsegger</surname><given-names>GN</given-names> </name><name name-style="western"><surname>Toedebusch</surname><given-names>RG</given-names> </name></person-group><article-title>Role of inactivity in chronic diseases: evolutionary insight and pathophysiological mechanisms</article-title><source>Physiol Rev</source><year>2017</year><month>10</month><day>1</day><volume>97</volume><issue>4</issue><fpage>1351</fpage><lpage>1402</lpage><pub-id pub-id-type="doi">10.1152/physrev.00019.2016</pub-id><pub-id pub-id-type="medline">28814614</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhang</surname><given-names>T</given-names> </name><name name-style="western"><surname>Zhou</surname><given-names>R</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>T</given-names> </name><name name-style="western"><surname>Xin</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Huang</surname><given-names>H</given-names> </name></person-group><article-title>Effects of traditional mind-body movement therapy on chronic cardiopulmonary dyspnoea: a systematic review and 
meta-analysis</article-title><source>Thorax</source><year>2023</year><month>01</month><volume>78</volume><issue>1</issue><fpage>69</fpage><lpage>75</lpage><pub-id pub-id-type="doi">10.1136/thoraxjnl-2021-218030</pub-id><pub-id pub-id-type="medline">35483892</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gonz&#x00E1;lez-Rocha</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mendez-Sanchez</surname><given-names>L</given-names> </name><name name-style="western"><surname>Ort&#x00ED;z-Rodr&#x00ED;guez</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Denova-Guti&#x00E9;rrez</surname><given-names>E</given-names> </name></person-group><article-title>Effect of exercise on muscle mass, fat mass, bone mass, muscular strength and physical performance in community dwelling older adults: systematic review and meta-analysis</article-title><source>Aging Dis</source><year>2022</year><month>10</month><day>1</day><volume>13</volume><issue>5</issue><fpage>1421</fpage><lpage>1435</lpage><pub-id pub-id-type="doi">10.14336/AD.2022.0215</pub-id><pub-id pub-id-type="medline">36186132</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mcleod</surname><given-names>JC</given-names> </name><name name-style="western"><surname>Currier</surname><given-names>BS</given-names> </name><name name-style="western"><surname>Lowisz</surname><given-names>CV</given-names> </name><name name-style="western"><surname>Phillips</surname><given-names>SM</given-names> </name></person-group><article-title>The influence of resistance exercise training prescription variables on skeletal muscle mass, strength, and physical function in healthy adults: an umbrella review</article-title><source>J Sport Health 
Sci</source><year>2024</year><month>01</month><volume>13</volume><issue>1</issue><fpage>47</fpage><lpage>60</lpage><pub-id pub-id-type="doi">10.1016/j.jshs.2023.06.005</pub-id><pub-id pub-id-type="medline">37385345</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Isath</surname><given-names>A</given-names> </name><name name-style="western"><surname>Koziol</surname><given-names>KJ</given-names> </name><name name-style="western"><surname>Martinez</surname><given-names>MW</given-names> </name><etal/></person-group><article-title>Exercise and cardiovascular health: a state-of-the-art review</article-title><source>Prog Cardiovasc Dis</source><year>2023</year><volume>79</volume><fpage>44</fpage><lpage>52</lpage><pub-id pub-id-type="doi">10.1016/j.pcad.2023.04.008</pub-id><pub-id pub-id-type="medline">37120119</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Northey</surname><given-names>JM</given-names> </name><name name-style="western"><surname>Cherbuin</surname><given-names>N</given-names> </name><name name-style="western"><surname>Pumpa</surname><given-names>KL</given-names> </name><name name-style="western"><surname>Smee</surname><given-names>DJ</given-names> </name><name name-style="western"><surname>Rattray</surname><given-names>B</given-names> </name></person-group><article-title>Exercise interventions for cognitive function in adults older than 50: a systematic review with meta-analysis</article-title><source>Br J Sports Med</source><year>2018</year><month>02</month><volume>52</volume><issue>3</issue><fpage>154</fpage><lpage>160</lpage><pub-id pub-id-type="doi">10.1136/bjsports-2016-096587</pub-id><pub-id pub-id-type="medline">28438770</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>R</given-names> </name><name name-style="western"><surname>Guo</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Kuang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>Q</given-names> </name></person-group><article-title>Effects of home-based exercise interventions on post-stroke depression: a systematic review and network meta-analysis</article-title><source>Int J Nurs Stud</source><year>2024</year><month>04</month><volume>152</volume><fpage>104698</fpage><pub-id pub-id-type="doi">10.1016/j.ijnurstu.2024.104698</pub-id><pub-id pub-id-type="medline">38290424</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Memon</surname><given-names>AR</given-names> </name><name name-style="western"><surname>Gupta</surname><given-names>CC</given-names> </name><name name-style="western"><surname>Crowther</surname><given-names>ME</given-names> </name><name name-style="western"><surname>Ferguson</surname><given-names>SA</given-names> </name><name name-style="western"><surname>Tuckwell</surname><given-names>GA</given-names> </name><name name-style="western"><surname>Vincent</surname><given-names>GE</given-names> </name></person-group><article-title>Sleep and physical activity in university students: a systematic review and meta-analysis</article-title><source>Sleep Med Rev</source><year>2021</year><month>08</month><volume>58</volume><fpage>101482</fpage><pub-id pub-id-type="doi">10.1016/j.smrv.2021.101482</pub-id><pub-id pub-id-type="medline">33864990</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Rijal</surname><given-names>A</given-names> </name><name name-style="western"><surname>Nielsen</surname><given-names>EE</given-names> </name><name name-style="western"><surname>Adhikari</surname><given-names>TB</given-names> </name><etal/></person-group><article-title>Effects of adding exercise to usual care in patients with either hypertension, type 2 diabetes or cardiovascular disease: a systematic review with meta-analysis and trial sequential analysis</article-title><source>Br J Sports Med</source><year>2023</year><month>07</month><volume>57</volume><issue>14</issue><fpage>930</fpage><lpage>939</lpage><pub-id pub-id-type="doi">10.1136/bjsports-2022-106002</pub-id><pub-id pub-id-type="medline">36450440</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>van Baak</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Mariman</surname><given-names>ECM</given-names> </name></person-group><article-title>Obesity-induced and weight-loss-induced physiological factors affecting weight regain</article-title><source>Nat Rev Endocrinol</source><year>2023</year><month>11</month><volume>19</volume><issue>11</issue><fpage>655</fpage><lpage>670</lpage><pub-id pub-id-type="doi">10.1038/s41574-023-00887-4</pub-id><pub-id pub-id-type="medline">37696920</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ekelund</surname><given-names>U</given-names> </name><name name-style="western"><surname>Tarp</surname><given-names>J</given-names> </name><name name-style="western"><surname>Steene-Johannessen</surname><given-names>J</given-names> </name><etal/></person-group><article-title>Dose-response associations between accelerometry measured physical activity and sedentary time and all 
cause mortality: systematic review and harmonised meta-analysis</article-title><source>BMJ</source><year>2019</year><month>08</month><day>21</day><volume>366</volume><fpage>l4570</fpage><pub-id pub-id-type="doi">10.1136/bmj.l4570</pub-id><pub-id pub-id-type="medline">31434697</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mattli</surname><given-names>R</given-names> </name><name name-style="western"><surname>Farcher</surname><given-names>R</given-names> </name><name name-style="western"><surname>Syleouni</surname><given-names>ME</given-names> </name><etal/></person-group><article-title>Physical activity interventions for primary prevention in adults: a systematic review of randomized controlled trial-based economic evaluations</article-title><source>Sports Med</source><year>2020</year><month>04</month><volume>50</volume><issue>4</issue><fpage>731</fpage><lpage>750</lpage><pub-id pub-id-type="doi">10.1007/s40279-019-01233-3</pub-id><pub-id pub-id-type="medline">31755043</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Oldridge</surname><given-names>N</given-names> </name><name name-style="western"><surname>Taylor</surname><given-names>RS</given-names> </name></person-group><article-title>Cost-effectiveness of exercise therapy in patients with coronary heart disease, chronic heart failure and associated risk factors: a systematic review of economic evaluations of randomized clinical trials</article-title><source>Eur J Prev Cardiol</source><year>2020</year><month>07</month><volume>27</volume><issue>10</issue><fpage>1045</fpage><lpage>1055</lpage><pub-id pub-id-type="doi">10.1177/2047487319881839</pub-id><pub-id pub-id-type="medline">31657233</pub-id></nlm-citation></ref><ref 
id="ref15"><label>15</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>D&#x2019;Onofrio</surname><given-names>G</given-names> </name><name name-style="western"><surname>Kirschner</surname><given-names>J</given-names> </name><name name-style="western"><surname>Prather</surname><given-names>H</given-names> </name><name name-style="western"><surname>Goldman</surname><given-names>D</given-names> </name><name name-style="western"><surname>Rozanski</surname><given-names>A</given-names> </name></person-group><article-title>Musculoskeletal exercise: its role in promoting health and longevity</article-title><source>Prog Cardiovasc Dis</source><year>2023</year><volume>77</volume><fpage>25</fpage><lpage>36</lpage><pub-id pub-id-type="doi">10.1016/j.pcad.2023.02.006</pub-id><pub-id pub-id-type="medline">36841491</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="web"><article-title>Tai Chi and your health</article-title><source>News in Health, National Institutes of Health</source><access-date>2024-06-13</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://newsinhealth.nih.gov/2016/12/tai-chi-your-health">https://newsinhealth.nih.gov/2016/12/tai-chi-your-health</ext-link></comment></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Choo</surname><given-names>YT</given-names> </name><name name-style="western"><surname>Jiang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Hong</surname><given-names>J</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>W</given-names> </name></person-group><article-title>Effectiveness of Tai Chi on quality of life, depressive symptoms and physical function among community-dwelling older adults with chronic disease: a systematic review and 
meta-analysis</article-title><source>Int J Nurs Stud</source><year>2020</year><month>11</month><volume>111</volume><fpage>103737</fpage><pub-id pub-id-type="doi">10.1016/j.ijnurstu.2020.103737</pub-id><pub-id pub-id-type="medline">32891966</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zheng</surname><given-names>G</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>F</given-names> </name><name name-style="western"><surname>Li</surname><given-names>S</given-names> </name><name name-style="western"><surname>Huang</surname><given-names>M</given-names> </name><name name-style="western"><surname>Tao</surname><given-names>J</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>L</given-names> </name></person-group><article-title>Tai Chi and the protection of cognitive ability: a systematic review of prospective studies in healthy adults</article-title><source>Am J Prev Med</source><year>2015</year><month>07</month><volume>49</volume><issue>1</issue><fpage>89</fpage><lpage>97</lpage><pub-id pub-id-type="doi">10.1016/j.amepre.2015.01.002</pub-id><pub-id pub-id-type="medline">26094229</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mak</surname><given-names>MK</given-names> </name><name name-style="western"><surname>Wong-Yu</surname><given-names>IS</given-names> </name><name name-style="western"><surname>Shen</surname><given-names>X</given-names> </name><name name-style="western"><surname>Chung</surname><given-names>CL</given-names> </name></person-group><article-title>Long-term effects of exercise and physical therapy in people with Parkinson disease</article-title><source>Nat Rev 
Neurol</source><year>2017</year><month>11</month><volume>13</volume><issue>11</issue><fpage>689</fpage><lpage>703</lpage><pub-id pub-id-type="doi">10.1038/nrneurol.2017.128</pub-id><pub-id pub-id-type="medline">29027544</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>G</given-names> </name><name name-style="western"><surname>Huang</surname><given-names>P</given-names> </name><name name-style="western"><surname>Cui</surname><given-names>SS</given-names> </name><etal/></person-group><article-title>Mechanisms of motor symptom improvement by long-term Tai Chi training in Parkinson&#x2019;s disease patients</article-title><source>Transl Neurodegener</source><year>2022</year><month>02</month><day>7</day><volume>11</volume><issue>1</issue><fpage>6</fpage><pub-id pub-id-type="doi">10.1186/s40035-022-00280-7</pub-id><pub-id pub-id-type="medline">35125106</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chan</surname><given-names>AWK</given-names> </name><name name-style="western"><surname>Chair</surname><given-names>SY</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>DTF</given-names> </name><etal/></person-group><article-title>Tai Chi exercise is more effective than brisk walking in reducing cardiovascular disease risk factors among adults with hypertension: a randomised controlled trial</article-title><source>Int J Nurs Stud</source><year>2018</year><month>12</month><volume>88</volume><fpage>44</fpage><lpage>52</lpage><pub-id pub-id-type="doi">10.1016/j.ijnurstu.2018.08.009</pub-id><pub-id pub-id-type="medline">30195124</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Qin</surname><given-names>J</given-names> </name><name name-style="western"><surname>Tao</surname><given-names>L</given-names> </name><etal/></person-group><article-title>Effects of Tai Chi Chuan on cognitive function in adults 60 years or older with type 2 diabetes and mild cognitive impairment in China</article-title><source>JAMA Netw Open</source><year>2023</year><month>04</month><day>3</day><volume>6</volume><issue>4</issue><fpage>e237004</fpage><pub-id pub-id-type="doi">10.1001/jamanetworkopen.2023.7004</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wang</surname><given-names>C</given-names> </name><name name-style="western"><surname>Schmid</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Fielding</surname><given-names>RA</given-names> </name><etal/></person-group><article-title>Effect of tai chi versus aerobic exercise for fibromyalgia: comparative effectiveness randomized controlled trial</article-title><source>BMJ</source><year>2018</year><month>03</month><day>21</day><volume>360</volume><fpage>k851</fpage><pub-id pub-id-type="doi">10.1136/bmj.k851</pub-id><pub-id pub-id-type="medline">29563100</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>X</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>P</given-names> </name><name name-style="western"><surname>Wu</surname><given-names>M</given-names> </name><etal/></person-group><article-title>Effect of Tai Chi vs aerobic exercise on blood pressure in patients with prehypertension: a randomized clinical 
trial</article-title><source>JAMA Netw Open</source><year>2024</year><month>02</month><day>5</day><volume>7</volume><issue>2</issue><fpage>e2354937</fpage><pub-id pub-id-type="doi">10.1001/jamanetworkopen.2023.54937</pub-id><pub-id pub-id-type="medline">38335001</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Winters-Stone</surname><given-names>KM</given-names> </name><name name-style="western"><surname>Horak</surname><given-names>F</given-names> </name><name name-style="western"><surname>Dieckmann</surname><given-names>NF</given-names> </name><etal/></person-group><article-title>GET FIT: a randomized clinical trial of Tai Ji Quan versus strength training for fall prevention after chemotherapy in older, postmenopausal women cancer survivors</article-title><source>J Clin Oncol</source><year>2023</year><month>06</month><day>20</day><volume>41</volume><issue>18</issue><fpage>3384</fpage><lpage>3396</lpage><pub-id pub-id-type="doi">10.1200/JCO.22.01519</pub-id><pub-id pub-id-type="medline">36888933</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>F</given-names> </name><name name-style="western"><surname>Harmer</surname><given-names>P</given-names> </name><name name-style="western"><surname>Fitzgerald</surname><given-names>K</given-names> </name><etal/></person-group><article-title>Effectiveness of a therapeutic Tai Ji Quan intervention vs a multimodal exercise intervention to prevent falls among older adults at high risk of falling: a randomized clinical trial</article-title><source>JAMA Intern Med</source><year>2018</year><month>10</month><day>1</day><volume>178</volume><issue>10</issue><fpage>1301</fpage><lpage>1310</lpage><pub-id 
pub-id-type="doi">10.1001/jamainternmed.2018.3915</pub-id><pub-id pub-id-type="medline">30208396</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>F</given-names> </name><name name-style="western"><surname>Harmer</surname><given-names>P</given-names> </name><name name-style="western"><surname>Eckstrom</surname><given-names>E</given-names> </name><name name-style="western"><surname>Fitzgerald</surname><given-names>K</given-names> </name><name name-style="western"><surname>Chou</surname><given-names>LS</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>Y</given-names> </name></person-group><article-title>Effectiveness of Tai Ji Quan vs multimodal and stretching exercise interventions for reducing injurious falls in older adults at high risk of falling: follow-up analysis of a randomized clinical trial</article-title><source>JAMA Netw Open</source><year>2019</year><month>02</month><day>1</day><volume>2</volume><issue>2</issue><fpage>e188280</fpage><pub-id pub-id-type="doi">10.1001/jamanetworkopen.2018.8280</pub-id><pub-id pub-id-type="medline">30768195</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Montero-Odasso</surname><given-names>M</given-names> </name><name name-style="western"><surname>van der Velde</surname><given-names>N</given-names> </name><name name-style="western"><surname>Martin</surname><given-names>FC</given-names> </name><etal/></person-group><article-title>World guidelines for falls prevention and management for older adults: a global initiative</article-title><source>Age Ageing</source><year>2022</year><month>09</month><day>2</day><volume>51</volume><issue>9</issue><fpage>afac205</fpage><pub-id 
pub-id-type="doi">10.1093/ageing/afac205</pub-id><pub-id pub-id-type="medline">36178003</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>F</given-names> </name><name name-style="western"><surname>Harmer</surname><given-names>P</given-names> </name><name name-style="western"><surname>Fitzgerald</surname><given-names>K</given-names> </name><etal/></person-group><article-title>Tai Chi and postural stability in patients with Parkinson&#x2019;s disease</article-title><source>N Engl J Med</source><year>2012</year><month>02</month><day>9</day><volume>366</volume><issue>6</issue><fpage>511</fpage><lpage>519</lpage><pub-id pub-id-type="doi">10.1056/NEJMoa1107911</pub-id><pub-id pub-id-type="medline">22316445</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Osborne</surname><given-names>JA</given-names> </name><name name-style="western"><surname>Botkin</surname><given-names>R</given-names> </name><name name-style="western"><surname>Colon-Semenza</surname><given-names>C</given-names> </name><etal/></person-group><article-title>Physical therapist management of Parkinson disease: a clinical practice guideline from the American Physical Therapy Association</article-title><source>Phys Ther</source><year>2022</year><month>04</month><day>1</day><volume>102</volume><issue>4</issue><fpage>pzab302</fpage><pub-id pub-id-type="doi">10.1093/ptj/pzab302</pub-id><pub-id pub-id-type="medline">34963139</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="confproc"><person-group person-group-type="editor"><name name-style="western"><surname>Becker</surname><given-names>DA</given-names> </name><name name-style="western"><surname>Pentland</surname><given-names>A</given-names> 
</name></person-group><article-title>Using a virtual environment to teach cancer patients t&#x2019;ai chi, relaxation and self-imagery</article-title><year>1997</year><access-date>2025-12-16</access-date><conf-name>ACM Siggraph Symposium on Interactive 3D Graphics</conf-name><comment><ext-link ext-link-type="uri" xlink:href="https://tinyurl.com/588samwc">https://tinyurl.com/588samwc</ext-link></comment></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>X</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Li</surname><given-names>Y</given-names> </name><etal/></person-group><article-title>ImmerTai: immersive motion learning in VR environments</article-title><source>J Vis Commun Image Represent</source><year>2019</year><month>01</month><volume>58</volume><fpage>416</fpage><lpage>427</lpage><pub-id pub-id-type="doi">10.1016/j.jvcir.2018.11.039</pub-id></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Arlati</surname><given-names>S</given-names> </name><name name-style="western"><surname>Colombo</surname><given-names>V</given-names> </name><name name-style="western"><surname>Spoladore</surname><given-names>D</given-names> </name><etal/></person-group><article-title>A social virtual reality-based application for the physical and cognitive training of the elderly at home</article-title><source>Sensors (Basel)</source><year>2019</year><month>01</month><day>10</day><volume>19</volume><issue>2</issue><fpage>261</fpage><pub-id pub-id-type="doi">10.3390/s19020261</pub-id><pub-id pub-id-type="medline">30634719</pub-id></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Choi</surname><given-names>SD</given-names> </name><name name-style="western"><surname>Guo</surname><given-names>L</given-names> </name><name name-style="western"><surname>Kang</surname><given-names>D</given-names> </name><name name-style="western"><surname>Xiong</surname><given-names>S</given-names> </name></person-group><article-title>Exergame technology and interactive interventions for elderly fall prevention: a systematic literature review</article-title><source>Appl Ergon</source><year>2017</year><month>11</month><volume>65</volume><fpage>570</fpage><lpage>581</lpage><pub-id pub-id-type="doi">10.1016/j.apergo.2016.10.013</pub-id><pub-id pub-id-type="medline">27825723</pub-id></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liberati</surname><given-names>A</given-names> </name><name name-style="western"><surname>Altman</surname><given-names>DG</given-names> </name><name name-style="western"><surname>Tetzlaff</surname><given-names>J</given-names> </name><etal/></person-group><article-title>The PRISMA statement for reporting systematic reviews and meta-analyses of studies that evaluate health care interventions: explanation and elaboration</article-title><source>PLoS Med</source><year>2009</year><month>07</month><day>21</day><volume>6</volume><issue>7</issue><fpage>e1000100</fpage><pub-id pub-id-type="doi">10.1371/journal.pmed.1000100</pub-id><pub-id pub-id-type="medline">19621070</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Santos</surname><given-names>A dos</given-names> </name><name name-style="western"><surname>Delamaro</surname><given-names>ME</given-names> </name><name name-style="western"><surname>Nunes</surname><given-names>FLS</given-names> 
</name></person-group><article-title>The relationship between requirements engineering and virtual reality systems: a systematic literature review</article-title><year>2013</year><conf-name>2013 XV Symposium on Virtual and Augmented Reality IEEE</conf-name><conf-date>May 28-31, 2013</conf-date><conf-loc>Cuiaba - Mato Grosso, Brazil</conf-loc><fpage>53</fpage><lpage>62</lpage><pub-id pub-id-type="doi">10.1109/SVR.2013.52</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Lee</surname><given-names>JD</given-names> </name><name name-style="western"><surname>Hsieh</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Lin</surname><given-names>TY</given-names> </name></person-group><article-title>A Kinect-based Tai Chi exercises evaluation system for physical rehabilitation</article-title><year>2014</year><conf-name>2014 IEEE International Conference on Consumer Electronics (ICCE)</conf-name><conf-date>Jan 10-13, 2014</conf-date><conf-loc>Las Vegas, NV, USA</conf-loc><fpage>177</fpage><lpage>178</lpage><pub-id pub-id-type="doi">10.1109/ICCE.2014.6775961</pub-id></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>YC</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Chen</surname><given-names>YC</given-names> </name><name name-style="western"><surname>Kao</surname><given-names>PY</given-names> </name><name name-style="western"><surname>Lu</surname><given-names>KY</given-names> </name><name name-style="western"><surname>Wei</surname><given-names>SY</given-names> </name><name name-style="western"><surname>Hung</surname><given-names>YP</given-names> </name></person-group><article-title>Pressure sensing insoles for 
learning Tai-Chi Chuan</article-title><year>2015</year><conf-name>The fourth International Conference on Information Science and Cloud Computing</conf-name><conf-date>Dec 18-19, 2015</conf-date><pub-id pub-id-type="doi">10.22323/1.264.0048</pub-id></nlm-citation></ref><ref id="ref39"><label>39</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Iwaanaguchi</surname><given-names>T</given-names> </name><name name-style="western"><surname>Shinya</surname><given-names>M</given-names> </name><name name-style="western"><surname>Nakajima</surname><given-names>S</given-names> </name><name name-style="western"><surname>Shiraishi</surname><given-names>M</given-names> </name></person-group><article-title>Cyber Tai Chi - CG-based video materials for Tai Chi Chuan self-study</article-title><year>2015</year><conf-name>2015 International Conference on Cyberworlds (CW)</conf-name><conf-loc>Visby, Sweden</conf-loc><fpage>365</fpage><lpage>368</lpage><pub-id pub-id-type="doi">10.1109/CW.2015.13</pub-id></nlm-citation></ref><ref id="ref40"><label>40</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Bian</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>C</given-names> </name><name name-style="western"><surname>Guan</surname><given-names>D</given-names> </name><etal/></person-group><article-title>Effects of pedagogical agent&#x2019;s personality and emotional feedback strategy on Chinese students&#x2019; learning experiences and performance</article-title><source>CHI &#x2019;16: Proceedings of the 2016 CHI Conference on Human Factors in Computing Systems</source><year>2016</year><month>05</month><day>7</day><pub-id pub-id-type="doi">10.1145/2858036.2858351</pub-id></nlm-citation></ref><ref id="ref41"><label>41</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Han</surname><given-names>PH</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>K</given-names> </name><name name-style="western"><surname>Hsieh</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Huang</surname><given-names>YJ</given-names> </name><name name-style="western"><surname>Hung</surname><given-names>YP</given-names> </name></person-group><article-title>AR-arm: augmented visualization for guiding arm movement in the first-person perspective</article-title><source>AH &#x2019;16: Proceedings of the 7th Augmented Human International Conference 2016</source><year>2016</year><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1145/2875194.2875237</pub-id></nlm-citation></ref><ref id="ref42"><label>42</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Han</surname><given-names>P</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Zhong</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>H</given-names> </name><name name-style="western"><surname>Hung</surname><given-names>Y</given-names> </name></person-group><article-title>My Tai-Chi coaches: an augmented-learning tool for practicing Tai-Chi Chuan</article-title><source>AH &#x2019;17: Proceedings of the 8th Augmented Human International Conference</source><year>2017</year><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1145/3041164.3041194</pub-id></nlm-citation></ref><ref id="ref43"><label>43</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhihong</surname><given-names>X</given-names> </name><name name-style="western"><surname>Liying</surname><given-names>Z</given-names> 
</name><name name-style="western"><surname>Zhenhua</surname><given-names>C</given-names> </name><name name-style="western"><surname>Haozhi</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Chunhui</surname><given-names>Y</given-names> </name></person-group><article-title>Research of Tai-Chi-Chuan auxiliary training system based on Kinect</article-title><source>Journal of Hebei University of Science and Technology</source><year>2017</year><volume>38</volume><issue>2</issue><fpage>183</fpage><lpage>189</lpage><pub-id pub-id-type="doi">10.7535/hbkd.2017yx02013</pub-id></nlm-citation></ref><ref id="ref44"><label>44</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Wu</surname><given-names>D</given-names> </name><name name-style="western"><surname>Ledesma</surname><given-names>D</given-names> </name><name name-style="western"><surname>Davis</surname><given-names>C</given-names> </name><name name-style="western"><surname>Slaughter</surname><given-names>R</given-names> </name><name name-style="western"><surname>Guo</surname><given-names>Z</given-names> </name></person-group><article-title>Virtual Tai-Chi system: a smart-connected modality for rehabilitation</article-title><source>Smart Health (2014)</source><year>2018</year><month>12</month><volume>9-10</volume><fpage>232</fpage><lpage>249</lpage><pub-id pub-id-type="doi">10.1016/j.smhl.2018.07.021</pub-id></nlm-citation></ref><ref id="ref45"><label>45</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Guimar&#x00E3;es</surname><given-names>V</given-names> </name><name name-style="western"><surname>Pereira</surname><given-names>A</given-names> </name><name name-style="western"><surname>Oliveira</surname><given-names>E</given-names> </name><name 
name-style="western"><surname>Carvalho</surname><given-names>A</given-names> </name><name name-style="western"><surname>Peixoto</surname><given-names>R</given-names> </name></person-group><article-title>Design and evaluation of an exergame for motor-cognitive training and fall prevention in older adults</article-title><source>Proceedings of the 4th EAI International Conference on Smart Objects and Technologies for Social Good</source><year>2018</year><month>11</month><day>28</day><fpage>202</fpage><lpage>207</lpage><pub-id pub-id-type="doi">10.1145/3284869.3284918</pub-id></nlm-citation></ref><ref id="ref46"><label>46</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Delfa</surname><given-names>J</given-names> </name><name name-style="western"><surname>Jarvis</surname><given-names>R</given-names> </name><name name-style="western"><surname>Khot</surname><given-names>RA</given-names> </name><name name-style="western"><surname>Mueller</surname><given-names>F</given-names> </name></person-group><article-title>Tai Chi in The Clouds: using micro UAV&#x2019;s to support Tai Chi practice</article-title><source>CHI PLAY &#x2019;18 Extended Abstracts: Proceedings of the 2018 Annual Symposium on Computer-Human Interaction in PLAY Companion Extended Abstracts</source><year>2018</year><fpage>513</fpage><lpage>519</lpage><pub-id pub-id-type="doi">10.1145/3270316.3271511</pub-id></nlm-citation></ref><ref id="ref47"><label>47</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Bian</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>C</given-names> </name><name name-style="western"><surname>Zhou</surname><given-names>C</given-names> </name><etal/></person-group><article-title>Exploring the weak association between flow experience and performance in virtual 
environments</article-title><year>2018</year><month>04</month><day>21</day><conf-name>Proceedings of the 2018 CHI conference on human factors in computing systems</conf-name><conf-loc>Montreal, QC, Canada</conf-loc><fpage>1</fpage><lpage>12</lpage><pub-id pub-id-type="doi">10.1145/3173574.3173975</pub-id></nlm-citation></ref><ref id="ref48"><label>48</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hsieh</surname><given-names>CC</given-names> </name><name name-style="western"><surname>Lin</surname><given-names>PS</given-names> </name><name name-style="western"><surname>Hsu</surname><given-names>WC</given-names> </name><etal/></person-group><article-title>The effectiveness of a virtual reality-based Tai Chi exercise on cognitive and physical function in older adults with cognitive impairment</article-title><source>Dement Geriatr Cogn Disord</source><year>2018</year><volume>46</volume><issue>5-6</issue><fpage>358</fpage><lpage>370</lpage><pub-id pub-id-type="doi">10.1159/000494659</pub-id><pub-id pub-id-type="medline">30537752</pub-id></nlm-citation></ref><ref id="ref49"><label>49</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lin</surname><given-names>H</given-names> </name><name name-style="western"><surname>Han</surname><given-names>P</given-names> </name><name name-style="western"><surname>Lu</surname><given-names>K</given-names> </name><etal/></person-group><article-title>Stillness moves: exploring body weight-transfer learning in physical training for Tai-Chi exercise</article-title><source>MMSports&#x2019;18: Proceedings of the 1st International Workshop on Multimedia Content Analysis in Sports</source><year>2018</year><fpage>21</fpage><lpage>29</lpage><pub-id pub-id-type="doi">10.1145/3265845.3265856</pub-id></nlm-citation></ref><ref id="ref50"><label>50</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Yu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Xiong</surname><given-names>S</given-names> </name></person-group><article-title>A dynamic time warping based algorithm to evaluate Kinect-enabled home-based physical rehabilitation exercises for older people</article-title><source>Sensors (Basel)</source><year>2019</year><month>06</month><day>28</day><volume>19</volume><issue>13</issue><fpage>2882</fpage><pub-id pub-id-type="doi">10.3390/s19132882</pub-id><pub-id pub-id-type="medline">31261746</pub-id></nlm-citation></ref><ref id="ref51"><label>51</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Kao</surname><given-names>PY</given-names> </name><name name-style="western"><surname>Han</surname><given-names>PH</given-names> </name><name name-style="western"><surname>Jan</surname><given-names>YF</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>ZW</given-names> </name><name name-style="western"><surname>Li</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Hung</surname><given-names>YP</given-names> </name></person-group><article-title>On learning weight distribution of Tai Chi Chuan using pressure sensing insoles and MR-HMD</article-title><year>2019</year><conf-name>2019 IEEE Conference on Virtual Reality and 3D User Interfaces (VR)</conf-name><conf-loc>Osaka, Japan</conf-loc><fpage>1457</fpage><lpage>1464</lpage><pub-id pub-id-type="doi">10.1109/VR.2019.8797986</pub-id></nlm-citation></ref><ref id="ref52"><label>52</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhu</surname><given-names>L</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>Z</given-names> </name><name 
name-style="western"><surname>Wang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Song</surname><given-names>A</given-names> </name><name name-style="western"><surname>Potel</surname><given-names>M</given-names> </name></person-group><article-title>Follow the smoke: immersive display of motion data with synthesized smoke</article-title><source>IEEE Comput Graph Appl</source><year>2019</year><volume>39</volume><issue>4</issue><fpage>86</fpage><lpage>94</lpage><pub-id pub-id-type="doi">10.1109/MCG.2019.2915717</pub-id><pub-id pub-id-type="medline">31226062</pub-id></nlm-citation></ref><ref id="ref53"><label>53</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Hung</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Kao</surname><given-names>P</given-names> </name><name name-style="western"><surname>Jan</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Li</surname><given-names>C</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>C</given-names> </name><name name-style="western"><surname>Han</surname><given-names>P</given-names> </name></person-group><article-title>An adaptive Tai-Chi-Chuan AR guiding system based on speed estimation of movement</article-title><source>ICS 2018: New Trends in Computer Technologies and Applications</source><year>2019</year><fpage>115</fpage><lpage>130</lpage><pub-id pub-id-type="doi">10.1007/978-981-13-9190-3_11</pub-id></nlm-citation></ref><ref id="ref54"><label>54</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kamel</surname><given-names>A</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>B</given-names> </name><name name-style="western"><surname>Li</surname><given-names>P</given-names> </name><name
name-style="western"><surname>Sheng</surname><given-names>B</given-names> </name></person-group><article-title>An investigation of 3D human pose estimation for learning Tai Chi: a human factor perspective</article-title><source>Int J Hum-Comput Interact</source><year>2019</year><month>03</month><day>16</day><volume>35</volume><issue>4-5</issue><fpage>427</fpage><lpage>439</lpage><pub-id pub-id-type="doi">10.1080/10447318.2018.1543081</pub-id></nlm-citation></ref><ref id="ref55"><label>55</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>PJ</given-names> </name><name name-style="western"><surname>Penn</surname><given-names>IW</given-names> </name><name name-style="western"><surname>Wei</surname><given-names>SH</given-names> </name><name name-style="western"><surname>Chuang</surname><given-names>LR</given-names> </name><name name-style="western"><surname>Sung</surname><given-names>WH</given-names> </name></person-group><article-title>Augmented reality-assisted training with selected Tai-Chi movements improves balance control and increases lower limb muscle strength in older adults: a prospective randomized trial</article-title><source>J Exerc Sci Fit</source><year>2020</year><month>09</month><volume>18</volume><issue>3</issue><fpage>142</fpage><lpage>147</lpage><pub-id pub-id-type="doi">10.1016/j.jesf.2020.05.003</pub-id><pub-id pub-id-type="medline">32514277</pub-id></nlm-citation></ref><ref id="ref56"><label>56</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tharatipyakul</surname><given-names>A</given-names> </name><name name-style="western"><surname>Choo</surname><given-names>KTW</given-names> </name><name name-style="western"><surname>Perrault</surname><given-names>ST</given-names> </name></person-group><article-title>Pose estimation for facilitating movement learning from online 
videos</article-title><source>AVI &#x2019;20: Proceedings of the 2020 International Conference on Advanced Visual Interfaces</source><year>2020</year><month>09</month><day>28</day><fpage>1</fpage><lpage>5</lpage><pub-id pub-id-type="doi">10.1145/3399715.3399835</pub-id></nlm-citation></ref><ref id="ref57"><label>57</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liu</surname><given-names>J</given-names> </name><name name-style="western"><surname>Zheng</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>K</given-names> </name><name name-style="western"><surname>Bian</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Gai</surname><given-names>W</given-names> </name><name name-style="western"><surname>Gao</surname><given-names>D</given-names> </name></person-group><article-title>A real-time interactive Tai Chi learning system based on VR and motion capture technology</article-title><source>Procedia Comput Sci</source><year>2020</year><volume>174</volume><fpage>712</fpage><lpage>719</lpage><pub-id pub-id-type="doi">10.1016/j.procs.2020.06.147</pub-id></nlm-citation></ref><ref id="ref58"><label>58</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>Q</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>S</given-names> </name></person-group><article-title>Design of Tai-Chi push-hands robot control system and construction of visual platform</article-title><year>2020</year><conf-name>2020 Chinese Control And Decision Conference (CCDC)</conf-name><pub-id pub-id-type="doi">10.1109/CCDC49329.2020.9164028</pub-id></nlm-citation></ref><ref id="ref59"><label>59</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name 
name-style="western"><surname>Jan</surname><given-names>YF</given-names> </name><name name-style="western"><surname>Tseng</surname><given-names>KW</given-names> </name><name name-style="western"><surname>Kao</surname><given-names>PY</given-names> </name><name name-style="western"><surname>Hung</surname><given-names>YP</given-names> </name></person-group><article-title>Augmented Tai-Chi Chuan practice tool with pose evaluation</article-title><year>2021</year><conf-name>2021 IEEE 4th International Conference on Multimedia Information Processing and Retrieval (MIPR)</conf-name><pub-id pub-id-type="doi">10.1109/MIPR51284.2021.00013</pub-id></nlm-citation></ref><ref id="ref60"><label>60</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gao</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>A</given-names> </name><name name-style="western"><surname>Hui</surname><given-names>P</given-names> </name></person-group><article-title>Meditation in motion: interactive media art visualization based on ancient Tai Chi Chuan [Abstract]</article-title><source>MM &#x2019;22: Proceedings of the 30th ACM International Conference on Multimedia</source><year>2022</year><fpage>7241</fpage><lpage>7242</lpage><pub-id pub-id-type="doi">10.1145/3503161.3549967</pub-id></nlm-citation></ref><ref id="ref61"><label>61</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wei</surname><given-names>C</given-names> </name><name name-style="western"><surname>Wen</surname><given-names>J</given-names> </name><name name-style="western"><surname>Bi</surname><given-names>R</given-names> </name><etal/></person-group><article-title>Online 8-form Tai Chi Chuan training and evaluation system based on pose estimation</article-title><year>2022</year><conf-name>2022 IEEE 24th Int Conf on High Performance Computing 
&#x0026; Communications; 8th Int Conf on Data Science &#x0026; Systems; 20th Int Conf on Smart City; 8th Int Conf on Dependability in Sensor, Cloud &#x0026; Big Data Systems &#x0026; Application (HPCC/DSS/SmartCity/DependSys)</conf-name><conf-date>Dec 18-20, 2022</conf-date><conf-loc>Hainan, China</conf-loc><fpage>366</fpage><lpage>371</lpage><pub-id pub-id-type="doi">10.1109/HPCC-DSS-SmartCity-DependSys57074.2022.00078</pub-id></nlm-citation></ref><ref id="ref62"><label>62</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>J</given-names> </name><name name-style="western"><surname>Hu</surname><given-names>H</given-names> </name><name name-style="western"><surname>Xing</surname><given-names>Q</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>X</given-names> </name><name name-style="western"><surname>Li</surname><given-names>J</given-names> </name><name name-style="western"><surname>Shen</surname><given-names>Y</given-names> </name></person-group><article-title>Tai chi action quality assessment and visual analysis with a consumer RGB-d camera</article-title><year>2022</year><conf-name>2022 IEEE 24th International Workshop on Multimedia Signal Processing (MMSP)</conf-name><conf-date>Sep 26-28, 2022</conf-date><conf-loc>Shanghai, China</conf-loc><fpage>1</fpage><lpage>06</lpage><pub-id pub-id-type="doi">10.1109/MMSP55362.2022.9949464</pub-id></nlm-citation></ref><ref id="ref63"><label>63</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Yuan</surname><given-names>T</given-names> </name><name name-style="western"><surname>Yu</surname><given-names>K</given-names> </name></person-group><article-title>Design and implementation of taijiquan learning system based on PHP+ 
MYSQL</article-title><year>2022</year><conf-name>2022 International Conference on Information System, Computing and Educational Technology (ICISCET)</conf-name><conf-date>May 23-25, 2022</conf-date><conf-loc>Montreal, QC, Canada</conf-loc><fpage>35</fpage><lpage>39</lpage><pub-id pub-id-type="doi">10.1109/ICISCET56785.2022.00017</pub-id></nlm-citation></ref><ref id="ref64"><label>64</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kim</surname><given-names>J</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>PS</given-names> </name><name name-style="western"><surname>Min Oh</surname><given-names>S</given-names> </name><name name-style="western"><surname>Han</surname><given-names>S</given-names> </name></person-group><article-title>A pilot study of virtual reality (VR) Tai Chi program on mental health among older adults during the COVID-19 pandemic</article-title><source>Am J Health Behav</source><year>2022</year><month>10</month><day>17</day><volume>46</volume><issue>5</issue><fpage>576</fpage><lpage>585</lpage><pub-id pub-id-type="doi">10.5993/AJHB.46.5.8</pub-id><pub-id pub-id-type="medline">36333829</pub-id></nlm-citation></ref><ref id="ref65"><label>65</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tian</surname><given-names>F</given-names> </name><name name-style="western"><surname>Zou</surname><given-names>J</given-names> </name><name name-style="western"><surname>Li</surname><given-names>K</given-names> </name><name name-style="western"><surname>Li</surname><given-names>Y</given-names> </name></person-group><article-title>Kung Fu Metaverse: a movement guidance training system</article-title><source>IEEE Trans Learning 
Technol</source><year>2023</year><volume>16</volume><issue>6</issue><fpage>1082</fpage><lpage>1095</lpage><pub-id pub-id-type="doi">10.1109/TLT.2023.3317945</pub-id></nlm-citation></ref><ref id="ref66"><label>66</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wang</surname><given-names>L</given-names> </name><name name-style="western"><surname>Deng</surname><given-names>W</given-names> </name></person-group><article-title>Research on the auxiliary training system of tai chi fitness qigong based on computer 3D image vision technology</article-title><year>2023</year><conf-name>2023 IEEE International Conference on Image Processing and Computer Applications (ICIPCA)</conf-name><conf-date>Aug 11-13, 2023</conf-date><conf-loc>Changchun, China</conf-loc><fpage>177</fpage><lpage>182</lpage><pub-id pub-id-type="doi">10.1109/ICIPCA59209.2023.10257873</pub-id></nlm-citation></ref><ref id="ref67"><label>67</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wang</surname><given-names>L</given-names> </name><name name-style="western"><surname>Deng</surname><given-names>W</given-names> </name></person-group><article-title>Research on tai chi APP simulation system based on computer virtual reality technology</article-title><year>2023</year><conf-name>2023 IEEE International Conference on Image Processing and Computer Applications (ICIPCA)</conf-name><conf-date>Aug 11-13, 2023</conf-date><conf-loc>Changchun, China</conf-loc><fpage>90</fpage><lpage>95</lpage><pub-id pub-id-type="doi">10.1109/ICIPCA59209.2023.10257805</pub-id></nlm-citation></ref><ref id="ref68"><label>68</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Kanchanapaetnukul</surname><given-names>S</given-names> </name><name name-style="western"><surname>Aunkaew</surname><given-names>R</given-names> 
</name><name name-style="western"><surname>Charernmool</surname><given-names>P</given-names> </name><name name-style="western"><surname>Daoudi</surname><given-names>M</given-names> </name><name name-style="western"><surname>Saraubon</surname><given-names>K</given-names> </name><name name-style="western"><surname>Visutsak</surname><given-names>P</given-names> </name></person-group><article-title>Tai chi exercise posture detection and assessment for the elderly using BPNN and 2 kinect cameras</article-title><year>2023</year><conf-name>2023 International Technical Conference on Circuits/Systems, Computers, and Communications (ITC-CSCC)</conf-name><conf-date>Jun 25-28, 2023</conf-date><conf-loc>Jeju, Republic of Korea</conf-loc><fpage>1</fpage><lpage>5</lpage><pub-id pub-id-type="doi">10.1109/ITC-CSCC58803.2023.10212570</pub-id></nlm-citation></ref><ref id="ref69"><label>69</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tian</surname><given-names>F</given-names> </name><name name-style="western"><surname>Ni</surname><given-names>S</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>X</given-names> </name><etal/></person-group><article-title>Enhancing Tai Chi training system: towards group-based and hyper-realistic training experiences</article-title><source>IEEE Trans Vis Comput Graph</source><year>2024</year><month>05</month><volume>30</volume><issue>5</issue><fpage>1</fpage><lpage>11</lpage><pub-id pub-id-type="doi">10.1109/TVCG.2024.3372099</pub-id><pub-id pub-id-type="medline">38457324</pub-id></nlm-citation></ref><ref id="ref70"><label>70</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhu</surname><given-names>W</given-names> </name><name name-style="western"><surname>Fan</surname><given-names>X</given-names> </name><name 
name-style="western"><surname>Zhang</surname><given-names>Y</given-names> </name></person-group><article-title>Applications and research trends of digital human models in the manufacturing industry</article-title><source>Virtual Reality &#x0026; Intelligent Hardware</source><year>2019</year><month>12</month><volume>1</volume><issue>6</issue><fpage>558</fpage><lpage>579</lpage><pub-id pub-id-type="doi">10.1016/j.vrih.2019.09.005</pub-id></nlm-citation></ref><ref id="ref71"><label>71</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>G&#x00FC;rerk</surname><given-names>&#x00D6;</given-names> </name><name name-style="western"><surname>B&#x00F6;nsch</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kittsteiner</surname><given-names>T</given-names> </name><name name-style="western"><surname>Staffeldt</surname><given-names>A</given-names> </name></person-group><article-title>Virtual humans as co-workers: a novel methodology to study peer effects</article-title><source>J Behav Exp Econ</source><year>2019</year><month>02</month><volume>78</volume><fpage>17</fpage><lpage>29</lpage><pub-id pub-id-type="doi">10.1016/j.socec.2018.11.003</pub-id></nlm-citation></ref><ref id="ref72"><label>72</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Paravizo</surname><given-names>E</given-names> </name><name name-style="western"><surname>Braatz</surname><given-names>D</given-names> </name></person-group><article-title>Using a game engine for simulation in ergonomics analysis, design and education: an exploratory study</article-title><source>Appl Ergon</source><year>2019</year><month>05</month><volume>77</volume><fpage>22</fpage><lpage>28</lpage><pub-id pub-id-type="doi">10.1016/j.apergo.2019.01.001</pub-id><pub-id pub-id-type="medline">30832775</pub-id></nlm-citation></ref><ref 
id="ref73"><label>73</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Paravizo</surname><given-names>E</given-names> </name><name name-style="western"><surname>Braatz</surname><given-names>D</given-names> </name></person-group><article-title>Employing game engines for ergonomics analysis, design and education</article-title><source>Proceedings of the 20th Congress of the International Ergonomics Association (IEA 2018) IEA 2018 Advances in Intelligent Systems and Computing</source><year>2018</year><volume>822</volume><pub-id pub-id-type="doi">10.1007/978-3-319-96077-7_35</pub-id></nlm-citation></ref><ref id="ref74"><label>74</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Birckhead</surname><given-names>B</given-names> </name><name name-style="western"><surname>Khalil</surname><given-names>C</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>X</given-names> </name><etal/></person-group><article-title>Recommendations for methodology of virtual reality clinical trials in health care by an international working group: iterative study</article-title><source>JMIR Ment Health</source><year>2019</year><month>01</month><day>31</day><volume>6</volume><issue>1</issue><fpage>e11973</fpage><pub-id pub-id-type="doi">10.2196/11973</pub-id><pub-id pub-id-type="medline">30702436</pub-id></nlm-citation></ref><ref id="ref75"><label>75</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Aboueldahab</surname><given-names>A</given-names> </name><name name-style="western"><surname>Damaschi</surname><given-names>G</given-names> </name><name name-style="western"><surname>D&#x2019;Addario</surname><given-names>M</given-names> </name><name name-style="western"><surname>Steca</surname><given-names>P</given-names> 
</name></person-group><article-title>Exploring young adults&#x2019; attitudes toward AI-driven mHealth apps: qualitative study</article-title><source>JMIR Hum Factors</source><year>2025</year><month>09</month><day>26</day><volume>12</volume><fpage>e76075</fpage><pub-id pub-id-type="doi">10.2196/76075</pub-id><pub-id pub-id-type="medline">41004277</pub-id></nlm-citation></ref><ref id="ref76"><label>76</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nairn</surname><given-names>B</given-names> </name><name name-style="western"><surname>Tsakanikas</surname><given-names>V</given-names> </name><name name-style="western"><surname>Gordon</surname><given-names>B</given-names> </name><etal/></person-group><article-title>Smart wearable technologies for balance rehabilitation in older adults at risk of falls: scoping review and comparative analysis</article-title><source>JMIR Rehabil Assist Technol</source><year>2025</year><month>05</month><day>28</day><volume>12</volume><fpage>e69589</fpage><pub-id pub-id-type="doi">10.2196/69589</pub-id><pub-id pub-id-type="medline">40435383</pub-id></nlm-citation></ref><ref id="ref77"><label>77</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cerfoglio</surname><given-names>S</given-names> </name><name name-style="western"><surname>Ferraris</surname><given-names>C</given-names> </name><name name-style="western"><surname>Vismara</surname><given-names>L</given-names> </name><etal/></person-group><article-title>Estimation of gait parameters in healthy and hemiplegic individuals using Azure Kinect: a comparative study with the optoelectronic system</article-title><source>Front Bioeng Biotechnol</source><year>2024</year><volume>12</volume><fpage>1449680</fpage><pub-id pub-id-type="doi">10.3389/fbioe.2024.1449680</pub-id><pub-id pub-id-type="medline">39654825</pub-id></nlm-citation></ref><ref 
id="ref78"><label>78</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Pak</surname><given-names>SS</given-names> </name><name name-style="western"><surname>Janela</surname><given-names>D</given-names> </name><name name-style="western"><surname>Freitas</surname><given-names>N</given-names> </name><etal/></person-group><article-title>Comparing digital to conventional physical therapy for chronic shoulder pain: randomized controlled trial</article-title><source>J Med Internet Res</source><year>2023</year><month>08</month><day>18</day><volume>25</volume><fpage>e49236</fpage><pub-id pub-id-type="doi">10.2196/49236</pub-id><pub-id pub-id-type="medline">37490337</pub-id></nlm-citation></ref><ref id="ref79"><label>79</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Seong</surname><given-names>S</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>H</given-names> </name><name name-style="western"><surname>Cho</surname><given-names>Y</given-names> </name><etal/></person-group><article-title>Impact of virtual reality-based biofeedback on sleep quality among individuals with depressive symptoms, anxiety symptoms, or both: 4-week randomized controlled study</article-title><source>J Med Internet Res</source><year>2025</year><month>06</month><day>20</day><volume>27</volume><fpage>e65772</fpage><pub-id pub-id-type="doi">10.2196/65772</pub-id><pub-id pub-id-type="medline">40539943</pub-id></nlm-citation></ref><ref id="ref80"><label>80</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ferreira</surname><given-names>S</given-names> </name><name name-style="western"><surname>Rodrigues</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Mateus</surname><given-names>C</given-names> </name><name 
name-style="western"><surname>Rodrigues</surname><given-names>PP</given-names> </name><name name-style="western"><surname>Rocha</surname><given-names>NB</given-names> </name></person-group><article-title>Interventions based on biofeedback systems to improve workers&#x2019; psychological well-being, mental health, and safety: systematic literature review</article-title><source>J Med Internet Res</source><year>2025</year><month>09</month><day>12</day><volume>27</volume><fpage>e70134</fpage><pub-id pub-id-type="doi">10.2196/70134</pub-id><pub-id pub-id-type="medline">40939175</pub-id></nlm-citation></ref><ref id="ref81"><label>81</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lai</surname><given-names>YJ</given-names> </name><name name-style="western"><surname>Chiu</surname><given-names>HY</given-names> </name><name name-style="western"><surname>Wu</surname><given-names>KC</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>CW</given-names> </name></person-group><article-title>Diaphragmatic breathing interfaces to promote relaxation for mitigating insomnia: pilot study</article-title><source>JMIR Serious Games</source><year>2025</year><month>03</month><day>4</day><volume>13</volume><fpage>e67000</fpage><pub-id pub-id-type="doi">10.2196/67000</pub-id><pub-id pub-id-type="medline">40053714</pub-id></nlm-citation></ref><ref id="ref82"><label>82</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Steen</surname><given-names>JP</given-names> </name><name name-style="western"><surname>Kannan</surname><given-names>V</given-names> </name><name name-style="western"><surname>Zaidi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Cramer</surname><given-names>H</given-names> </name><name name-style="western"><surname>Ng</surname><given-names>JY</given-names> 
</name></person-group><article-title>Mind-body therapy for treating fibromyalgia: a systematic review</article-title><source>Pain Med</source><year>2024</year><month>08</month><day>2</day><volume>25</volume><issue>12</issue><fpage>703</fpage><lpage>737</lpage><pub-id pub-id-type="doi">10.1093/pm/pnae076</pub-id><pub-id pub-id-type="medline">39093008</pub-id></nlm-citation></ref><ref id="ref83"><label>83</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Drummond</surname><given-names>D</given-names> </name><name name-style="western"><surname>Gonsard</surname><given-names>A</given-names> </name></person-group><article-title>Definitions and characteristics of patient digital twins being developed for clinical use: scoping review</article-title><source>J Med Internet Res</source><year>2024</year><month>11</month><day>13</day><volume>26</volume><fpage>e58504</fpage><pub-id pub-id-type="doi">10.2196/58504</pub-id><pub-id pub-id-type="medline">39536311</pub-id></nlm-citation></ref><ref id="ref84"><label>84</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Vall&#x00E9;e</surname><given-names>A</given-names> </name></person-group><article-title>Envisioning the future of personalized medicine: role and realities of digital twins</article-title><source>J Med Internet Res</source><year>2024</year><month>05</month><day>13</day><volume>26</volume><fpage>e50204</fpage><pub-id pub-id-type="doi">10.2196/50204</pub-id><pub-id pub-id-type="medline">38739913</pub-id></nlm-citation></ref><ref id="ref85"><label>85</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Palumbo</surname><given-names>A</given-names> </name></person-group><article-title>Microsoft HoloLens 2 in medical and healthcare context: state of the art and future prospects</article-title><source>Sensors 
(Basel)</source><year>2022</year><month>10</month><day>11</day><volume>22</volume><issue>20</issue><fpage>7709</fpage><pub-id pub-id-type="doi">10.3390/s22207709</pub-id><pub-id pub-id-type="medline">36298059</pub-id></nlm-citation></ref><ref id="ref86"><label>86</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ayed</surname><given-names>I</given-names> </name><name name-style="western"><surname>Ghazel</surname><given-names>A</given-names> </name><name name-style="western"><surname>Jaume-I-Cap&#x00F3;</surname><given-names>A</given-names> </name><name name-style="western"><surname>Moy&#x00E0;-Alcover</surname><given-names>G</given-names> </name><name name-style="western"><surname>Varona</surname><given-names>J</given-names> </name><name name-style="western"><surname>Mart&#x00ED;nez-Bueso</surname><given-names>P</given-names> </name></person-group><article-title>Vision-based serious games and virtual reality systems for motor rehabilitation: a review geared toward a research methodology</article-title><source>Int J Med Inform</source><year>2019</year><month>11</month><volume>131</volume><fpage>103909</fpage><pub-id pub-id-type="doi">10.1016/j.ijmedinf.2019.06.016</pub-id><pub-id pub-id-type="medline">31557701</pub-id></nlm-citation></ref><ref id="ref87"><label>87</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Molhemi</surname><given-names>F</given-names> </name><name name-style="western"><surname>Monjezi</surname><given-names>S</given-names> </name><name name-style="western"><surname>Mehravar</surname><given-names>M</given-names> </name><etal/></person-group><article-title>Effects of virtual reality vs conventional balance training on balance and falls in people with multiple sclerosis: a randomized controlled trial</article-title><source>Arch Phys Med 
Rehabil</source><year>2021</year><month>02</month><volume>102</volume><issue>2</issue><fpage>290</fpage><lpage>299</lpage><pub-id pub-id-type="doi">10.1016/j.apmr.2020.09.395</pub-id><pub-id pub-id-type="medline">33161005</pub-id></nlm-citation></ref><ref id="ref88"><label>88</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Levin</surname><given-names>MF</given-names> </name><name name-style="western"><surname>Deutsch</surname><given-names>JE</given-names> </name><name name-style="western"><surname>Kafri</surname><given-names>M</given-names> </name><name name-style="western"><surname>Liebermann</surname><given-names>DG</given-names> </name></person-group><article-title>Validity of virtual reality environments for sensorimotor rehabilitation</article-title><source>Virtual Reality for Physical and Motor Rehabilitation</source><year>2014</year><publisher-name>Springer Science + Business Media</publisher-name><fpage>95</fpage><lpage>118</lpage><pub-id pub-id-type="doi">10.1007/978-1-4939-0968-1_6</pub-id></nlm-citation></ref><ref id="ref89"><label>89</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wang</surname><given-names>D</given-names> </name><name name-style="western"><surname>Zheng</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Li</surname><given-names>T</given-names> </name><name name-style="western"><surname>Peng</surname><given-names>C</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>L</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>Y</given-names> </name></person-group><article-title>Multi-modal human-machine interaction for human intelligence augmentation</article-title><source>Sci 
Sin-Inf</source><year>2018</year><month>04</month><day>1</day><volume>48</volume><issue>4</issue><fpage>449</fpage><lpage>465</lpage><pub-id pub-id-type="doi">10.1360/N112017-00213</pub-id></nlm-citation></ref><ref id="ref90"><label>90</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gielniak</surname><given-names>MJ</given-names> </name><name name-style="western"><surname>Thomaz</surname><given-names>AL</given-names> </name></person-group><article-title>Enhancing interaction through exaggerated motion synthesis</article-title><source>HRI &#x2019;12: Proceedings of the seventh annual ACM/IEEE international conference on Human-Robot Interaction</source><year>2012</year><month>03</month><day>5</day><fpage>375</fpage><lpage>382</lpage><pub-id pub-id-type="doi">10.1145/2157689.2157813</pub-id></nlm-citation></ref><ref id="ref91"><label>91</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Timmermans</surname><given-names>AAA</given-names> </name><name name-style="western"><surname>Seelen</surname><given-names>HAM</given-names> </name><name name-style="western"><surname>Willmann</surname><given-names>RD</given-names> </name><name name-style="western"><surname>Kingma</surname><given-names>H</given-names> </name></person-group><article-title>Technology-assisted training of arm-hand skills in stroke: concepts on reacquisition of motor control and therapist guidelines for rehabilitation technology design</article-title><source>J Neuroeng Rehabil</source><year>2009</year><month>01</month><day>20</day><volume>6</volume><fpage>1</fpage><pub-id pub-id-type="doi">10.1186/1743-0003-6-1</pub-id><pub-id pub-id-type="medline">19154570</pub-id></nlm-citation></ref><ref id="ref92"><label>92</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Triantafyllidis</surname><given-names>A</given-names> </name><name name-style="western"><surname>Segkouli</surname><given-names>S</given-names> </name><name name-style="western"><surname>Zygouris</surname><given-names>S</given-names> </name><etal/></person-group><article-title>Mobile app interventions for Parkinson&#x2019;s disease, multiple sclerosis and stroke: a systematic literature review</article-title><source>Sensors (Basel)</source><year>2023</year><month>03</month><day>23</day><volume>23</volume><issue>7</issue><fpage>3396</fpage><pub-id pub-id-type="doi">10.3390/s23073396</pub-id><pub-id pub-id-type="medline">37050456</pub-id></nlm-citation></ref><ref id="ref93"><label>93</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Lin</surname><given-names>TY</given-names> </name><name name-style="western"><surname>Hsieh</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>JD</given-names> </name></person-group><article-title>Kinect-based system for physical rehabilitation: utilizing tai chi exercises to improve movement disorders in patients with balance ability</article-title><year>2013</year><conf-name>2013 7th Asia Modelling Symposium</conf-name><fpage>149</fpage><lpage>153</lpage><pub-id pub-id-type="doi">10.1109/AMS.2013.29</pub-id></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Multimedia Appendix 1</label><p>Search strategy for PubMed, Embase, Scopus, IEEE Xplore, and the ACM Digital Library.</p><media xlink:href="jmir_v27i1e64207_app1.docx" xlink:title="DOCX File, 21 KB"/></supplementary-material><supplementary-material id="app2"><label>Checklist 1</label><p>PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) checklist.</p><media xlink:href="jmir_v27i1e64207_app2.docx" xlink:title="DOCX File, 85 
KB"/></supplementary-material></app-group></back></article>