<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="review-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v27i1e67373</article-id>
      <article-id pub-id-type="pmid">40373307</article-id>
      <article-id pub-id-type="doi">10.2196/67373</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Review</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Review</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Recognition of Basic Activities of Daily Living Using Wearable Devices for Older Adults: Scoping Review</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Mavragani</surname>
            <given-names>Amaryllis</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Atigossou</surname>
            <given-names>Orthelo Léonel Gbètoho</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Ozkaynak</surname>
            <given-names>Mustafa</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Ray</surname>
            <given-names>Samantha J</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>Department of Medical Physiology</institution>
            <institution>College of Medicine</institution>
            <institution>Texas A&#38;M University</institution>
            <addr-line>MREB2</addr-line>
            <addr-line>8447 John Sharp Parkway</addr-line>
            <addr-line>Bryan, TX, 77807</addr-line>
            <country>United States</country>
            <phone>1 979 436 9259</phone>
            <email>sjr45@tamu.edu</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-3189-8899</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author">
          <name name-style="western">
            <surname>Cherian</surname>
            <given-names>Josh</given-names>
          </name>
          <degrees>MS, PhD</degrees>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-7749-2109</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Liberty</surname>
            <given-names>Amanda Mae</given-names>
          </name>
          <degrees>BS</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0002-5874-788X</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Hammond</surname>
            <given-names>Tracy Anne</given-names>
          </name>
          <degrees>MS, MA, FTO, PhD</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-7272-0507</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author">
          <name name-style="western">
            <surname>Shireman</surname>
            <given-names>Paula K</given-names>
          </name>
          <degrees>MS, MBA, MD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-9701-5422</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Medical Physiology</institution>
        <institution>College of Medicine</institution>
        <institution>Texas A&#38;M University</institution>
        <addr-line>Bryan, TX</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Center for Remote Health Monitoring, Department of Biomedical Engineering</institution>
        <institution>School of Medicine</institution>
        <institution>Wake Forest University</institution>
        <addr-line>Winston-Salem, NC</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Computer Science &#38; Engineering</institution>
        <institution>Texas A&#38;M University</institution>
        <addr-line>College Station, TX</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>Department of Primary Care &#38; Rural Medicine</institution>
        <institution>College of Medicine</institution>
        <institution>Texas A&#38;M University</institution>
        <addr-line>Bryan, TX</addr-line>
        <country>United States</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Samantha J Ray <email>sjr45@tamu.edu</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2025</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>15</day>
        <month>5</month>
        <year>2025</year>
      </pub-date>
      <volume>27</volume>
      <elocation-id>e67373</elocation-id>
      <history>
        <date date-type="received">
          <day>9</day>
          <month>10</month>
          <year>2024</year>
        </date>
        <date date-type="rev-request">
          <day>31</day>
          <month>12</month>
          <year>2024</year>
        </date>
        <date date-type="rev-recd">
          <day>21</day>
          <month>2</month>
          <year>2025</year>
        </date>
        <date date-type="accepted">
          <day>16</day>
          <month>3</month>
          <year>2025</year>
        </date>
      </history>
      <copyright-statement>©Samantha J Ray, Josh Cherian, Amanda Mae Liberty, Tracy Anne Hammond, Paula K Shireman. Originally published in the Journal of Medical Internet Research (https://www.jmir.org), 15.05.2025.</copyright-statement>
      <copyright-year>2025</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research (ISSN 1438-8871), is properly cited. The complete bibliographic information, a link to the original publication on https://www.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://www.jmir.org/2025/1/e67373" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Tracking the performance of activities of daily living (ADLs) using ADL recognition has the potential to facilitate aging-in-place strategies, allowing older adults to live in their homes longer and enabling their families and caregivers to monitor changes in health status. However, the ADL recognition literature historically has evaluated systems in controlled settings with data from younger populations, creating the question of whether these systems will work in real-world conditions for older populations.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>This scoping review seeks to establish the state-of-the-art for recognizing basic ADLs using wearable sensors. This primary goal will identify literature gaps and research needed to make ADL tracking viable for aging-in-place solutions. In addition, this paper will quantify how many publications include older adults. This secondary goal assesses how often studies evaluate their system with older adult participants, enhancing the trustworthiness of the approach.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>We conducted a scoping review using the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) guidelines. We identify studies focused on basic ADL recognition using wearable sensors within the PubMed, Association for Computing Machinery Digital Library (ACM DL), and Google Scholar databases using papers published in the last 6 calendar years (2019-2024) to identify current trends given the rapid changes in wearable technology devices. Publications must include at least one of the basic ADLs (ie, bathing, dressing, toileting, transferring, continence, and feeding) and include some sort of wearable sensor or device. Studies focusing on instrumental ADLs, general physical activity tracking, fall detection, or only using environmental devices are excluded. Studies that include older adults in the design or evaluation of their ADL recognition system are highlighted.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>The database search identified 695 papers; 164 papers passed title screening. A total of 58 studies satisfied the inclusion criteria; only 8 studies included older adults despite most studies identifying this population as a focus for their research. Most studies focused on eating (n=27), hygiene (n=24), drinking (n=20), or transitions (n=13). Few works included toileting (n=3), dressing (n=2), or bathing (n=1) activities. Of the 8 studies that included older adults, 5 focused on recognition performance while 3 focused on user experience and system acceptability.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>Basic ADLs are unevenly covered in the literature; more research is needed for recognizing bathing, dressing, and toileting activities. Despite all studies stating the importance of tracking ADLs in older adults, only 14% (8/58) of the included works involve older adult participants. A commonality between these outcomes is difficulty collecting or obtaining adequate training data for ADL recognition systems. Many works are predominantly concerned with proving system feasibility and do not assess usability or real-world deployment. For these systems to move from academic experiments to actual systems with clinical utility, ADL recognition systems must consider the design requirements of being part of remote health monitoring systems.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>activity recognition</kwd>
        <kwd>health monitoring</kwd>
        <kwd>wearable sensors</kwd>
        <kwd>activities of daily living</kwd>
        <kwd>aging in place</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>The need for aging-in-place solutions increases as the population of adults over the age of 65 years escalates [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref2">2</xref>]. One way to enable people to live independently in their own homes is to use technology to aid health management. Existing examples of this type of technology include medication reminders, vital sign monitoring, and fall detection systems [<xref ref-type="bibr" rid="ref3">3</xref>]. Many of these systems serve as safety nets to detect adverse events.</p>
      <p>An open problem is developing systems that can automatically track the performance of activities of daily living (ADLs). ADLs are generally divided into 2 categories: basic and instrumental [<xref ref-type="bibr" rid="ref4">4</xref>]. Basic ADLs (BADLs) are the essential activities to maintain quality of life and satisfy basic needs to stay alive [<xref ref-type="bibr" rid="ref4">4</xref>-<xref ref-type="bibr" rid="ref6">6</xref>]. By contrast, instrumental ADLs are characterized by more complex daily interactions, such as health and home management, driving and community mobility, child rearing, meal preparation and cleanup, medication management, and shopping [<xref ref-type="bibr" rid="ref7">7</xref>]. The ability to perform ADLs determines whether a person can live independently. Tracking ADLs offers opportunities for remote health monitoring and proactive health care by detecting changes in ADL performance as early as possible. For example, ADL tracking can be used to predict the presence of an acute illness by detecting symptoms such as lethargy, weakness, and decreased appetite [<xref ref-type="bibr" rid="ref8">8</xref>].</p>
      <p>One way to automate ADL tracking is to use activity recognition, a subarea of artificial intelligence focused on understanding human behavior. Activity recognition has become more feasible given the commercial proliferation of sensors partnered with access to the sensor data as well as advances in machine learning techniques [<xref ref-type="bibr" rid="ref9">9</xref>-<xref ref-type="bibr" rid="ref11">11</xref>]. Early studies in the field relied on custom hardware for the recognition of human activities [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref13">13</xref>]. More recent work has predominantly used commercially available devices [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref15">15</xref>]. ADLs such as brushing teeth [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>], taking medication [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref19">19</xref>], and washing hands [<xref ref-type="bibr" rid="ref20">20</xref>] have been recognized with encouraging degrees of accuracy. Current research focuses on recognizing multiple ADLs to create ubiquitous health monitoring applications [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref21">21</xref>].</p>
      <p>However, activity recognition covers a broad range of applications and hardware options. Novel sensors and approaches must collect custom datasets [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref22">22</xref>], but the standard practice is to evaluate proposed systems with existing datasets as benchmarks [<xref ref-type="bibr" rid="ref11">11</xref>,<xref ref-type="bibr" rid="ref21">21</xref>,<xref ref-type="bibr" rid="ref23">23</xref>-<xref ref-type="bibr" rid="ref29">29</xref>]. Publicly available datasets are heterogeneous with respect to the activities included, sensors used, and placement of the sensors. However, a commonality is recruiting young, healthy adults, resulting in the average age generally being less than 30 years. Common benchmark datasets (eg, MobiAct [<xref ref-type="bibr" rid="ref30">30</xref>], PAMAP2 [<xref ref-type="bibr" rid="ref31">31</xref>], UniMiB-SHAR [<xref ref-type="bibr" rid="ref32">32</xref>], and WISDM [<xref ref-type="bibr" rid="ref33">33</xref>]) follow this trend with an average age of 27 years or less. Older adults are more likely to be included only if the application has strong ties to age (eg, fall detection with the SisFall dataset [<xref ref-type="bibr" rid="ref34">34</xref>]).</p>
      <p>Existing datasets often only include younger adults because the main impetus for most data collection is to prove system feasibility (ie, prove that a computer can recognize the targeted activities with the used sensors). Researchers often assume that systems trained on data from younger adults will perform adequately for older adults because researchers expect that their systems will generalize to new populations. However, the realization that this assumption does not hold is growing [<xref ref-type="bibr" rid="ref35">35</xref>], and attention has been drawn specifically to activity recognition for older adults [<xref ref-type="bibr" rid="ref36">36</xref>]. These systems need to be proven to be reliable when used by older adults prior to clinical use. Consequently, there exists an open question of how effective these ADL recognition systems would be as tools to support aging in place.</p>
      <p>The primary goal of this scoping review is to understand the current state-of-the-art activity recognition systems focusing on BADLs using wearable devices. Recent, related reviews have focused on ambient sensors or smart home environments [<xref ref-type="bibr" rid="ref37">37</xref>-<xref ref-type="bibr" rid="ref40">40</xref>] or wearable sensors for just bathroom activities [<xref ref-type="bibr" rid="ref41">41</xref>]. This primary goal will summarize trends in BADL recognition works and identify gaps in the literature. The secondary goal is to identify works that include older adults as participants. This secondary goal will give insight into how many of the identified works could be used in aging-in-place solutions. Our review seeks to answer the following questions:</p>
      <list list-type="order">
        <list-item>
          <p>What is the current state-of-the-art for recognizing BADLs using wearable sensors?</p>
        </list-item>
        <list-item>
          <p>How many studies focused on BADL recognition using wearable sensors include older adults in the research?</p>
        </list-item>
      </list>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Overview</title>
        <p>This review identifies studies that focus on ADL recognition using wearable sensors to recognize basic ADLs as defined by Katz et al [<xref ref-type="bibr" rid="ref5">5</xref>]. A subgoal within this review is to identify studies that include older adults in the research, for example, as participants in data collection for training ADL recognition systems or in user studies centered on using wearable sensors for ADL recognition. This work is a scoping review that follows the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) guidelines [<xref ref-type="bibr" rid="ref42">42</xref>], and the protocol [<xref ref-type="bibr" rid="ref43">43</xref>] is registered with the Open Science Framework. Following standard procedures for PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses), publications are systematically and hierarchically screened and assessed for eligibility. Titles and abstracts are screened during the first step of the screening phase because some of our inclusion criteria (<xref ref-type="table" rid="table1">Table 1</xref>) may not appear in the title.</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Inclusion and exclusion criteria for paper selection.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="200"/>
            <col width="400"/>
            <col width="400"/>
            <thead>
              <tr valign="top">
                <td>Criteria type</td>
                <td>Inclusion</td>
                <td>Exclusion</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Sensor type</td>
                <td>Includes a sensor or device that is worn on the user’s body (ie, is a wearable solution)</td>
                <td>Includes only sensors or devices that are placed in the environment (ie, is an ambient solution)</td>
              </tr>
              <tr valign="top">
                <td>Included ADLs<sup>a</sup></td>
                <td>Includes at least one basic ADL (defined in <xref ref-type="table" rid="table3">Table 3</xref>)</td>
                <td>Does not include any basic ADLs (defined in <xref ref-type="table" rid="table3">Table 3</xref>)</td>
              </tr>
              <tr valign="top">
                <td>Paper focus</td>
                <td>Focuses on ADL recognition or ADL tracking systems</td>
                <td>Focuses on physical activity tracking (eg, exercise tracking) or fall detection</td>
              </tr>
              <tr valign="top">
                <td>Publication date</td>
                <td>Published in the range of January 1, 2019-December 31, 2024</td>
                <td>Published before 2019</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table1fn1">
              <p><sup>a</sup>ADL: activity of daily living.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
      <sec>
        <title>Sources of Evidence</title>
        <p>The focus of this survey is relevant to both the medical and computer science literature. We queried both PubMed and the Association for Computing Machinery Digital Library (ACM DL) to survey a substantial corpus of studies in both domains. In addition, we supplemented the queries with additional studies identified from Google Scholar. The queries were conducted in January 2025.</p>
      </sec>
      <sec>
        <title>Search Strategy</title>
        <p>The survey summarizes the state of basic ADL recognition by covering the last 6 calendar years (2019-2024). This timeframe focuses on recent trends as technology in this area can rapidly change, for example, the proliferation of consumer smart devices.</p>
        <p>The search terms used in the queries are provided in <xref ref-type="table" rid="table2">Table 2</xref>. These terms align with previous reviews focused on ADL recognition [<xref ref-type="bibr" rid="ref37">37</xref>,<xref ref-type="bibr" rid="ref41">41</xref>]. Each row in <xref ref-type="table" rid="table2">Table 2</xref> is combined with AND Boolean logic. “Wearable” is the only term used to specify the sensors used to minimize the capture of activity recognition systems using ambient or environmental sensors. The term “basic” is not included in the ADL-related terms because many works do not specify basic versus instrumental.</p>
        <table-wrap position="float" id="table2">
          <label>Table 2</label>
          <caption>
            <p>Search terms used in database queries. Rows of search terms are combined with AND logic. The asterisk (*) is a special character to capture any number of additional characters.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="500"/>
            <col width="500"/>
            <thead>
              <tr valign="top">
                <td>Search terms</td>
                <td>Rationale</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Wearable</td>
                <td>Use of wearable sensors or devices</td>
              </tr>
              <tr valign="top">
                <td>elder* OR older</td>
                <td>Target population of older adults</td>
              </tr>
              <tr valign="top">
                <td>recogni* OR monitor* OR detect*</td>
                <td>Focus on activity recognition</td>
              </tr>
              <tr valign="top">
                <td>adl OR adls OR “activities of daily living”</td>
                <td>Target within activity recognition</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
        <p>Katz et al [<xref ref-type="bibr" rid="ref5">5</xref>] define BADLs as Bathing, Dressing, Toileting, Transferring, Continence, and Feeding. The ability to perform these activities serves as a score of patient independence. The Katz Index of Independence in Activities of Daily Living gives clear definitions of what skills the patient needs to have and what types of assistance they can receive; for example, food preparation is not part of the Feeding activity. In practice, Katz BADLs are treated as categories and will include related and proxy activities. For example, Bathing includes other personal hygiene activities such as brushing teeth, and Toileting includes activities such as flushing, which serve as a proxy indicator of the main activity. Continence is generally combined with Toileting as the former is not an activity within the scope of activity recognition research. <xref ref-type="table" rid="table3">Table 3</xref> provides the adjusted definitions of each BADL to align with the practices in the ADL recognition literature.</p>
        <p>Activity recognition has a distinct literature related to ambulation activities such as walking, running, and ascending or descending stairs. These studies are diverse with respect to focusing on clinical applications, for example, remote monitoring of rehabilitation after injury, or nonclinical applications, such as exercise tracking. Many of these studies model ambulation as different states of being (eg, “the person is currently sitting” or “the person is currently walking”) and do not capture transitions such as standing up from sitting. Katz’s definition of transferring focuses on transitions rather than the patient’s general mobility; we do not include general ambulation activities in <xref ref-type="table" rid="table3">Table 3</xref>. Recognition of ambulatory activities and related gait attributes merit their own study.</p>
        <table-wrap position="float" id="table3">
          <label>Table 3</label>
          <caption>
            <p>Mapping of Katz's basic activities of daily living to activities.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="500"/>
            <col width="470"/>
            <thead>
              <tr valign="bottom">
                <td colspan="2">BADL<sup>a</sup> and categories</td>
                <td>Example activities</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="3">Bathing</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Bathing</td>
                <td>Bathing and showering</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Hygiene</td>
                <td>Brushing teeth and washing hands</td>
              </tr>
              <tr valign="top">
                <td colspan="3">Dressing</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Dressing</td>
                <td>Putting on and taking off clothing</td>
              </tr>
              <tr valign="top">
                <td colspan="3">Toileting and continence</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Toileting</td>
                <td>Using the toilet or urinal and flushing</td>
              </tr>
              <tr valign="top">
                <td colspan="3">Transferring</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Transitions</td>
                <td>Sitting-to-standing, lying-to-standing, and their inversions</td>
              </tr>
              <tr valign="top">
                <td colspan="3">Feeding</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Eating</td>
                <td>Eating with utensils and eating with hands</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Drinking</td>
                <td>Drinking from a cup or mug</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table3fn1">
              <p><sup>a</sup>BADL: basic activity of daily living.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
        <p>The eligibility and exclusion criteria are given in <xref ref-type="table" rid="table1">Table 1</xref>. ADL recognition needs to be a direct outcome or goal of the study to be included. Furthermore, at least one BADL needs to be included; studies focused on instrumental ADLs such as cooking were excluded. Two areas related to BADL recognition are physical activity recognition and fall detection. Physical activity recognition papers tend to focus on maintaining one’s physical health and tracking exercise, which are disparate goals from basic ADL monitoring. Some studies in fall detection have the goal of distinguishing falls versus normal, daily activities, but the identification of daily activities is a strategy to decrease false positives rather than a direct goal of the work. As such, studies focused on general physical activity or fall detection are excluded.</p>
        <p>The reviewing process was conducted predominantly by one author. The author met with at least 2 other authors to discuss ambiguous papers at the end of each step during the screening process. The eligibility and exclusion criteria were not subjective; one reviewer was sufficient for most papers.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Overview</title>
        <p>The search resulted in 695 studies. After removing duplicates, the titles and abstracts of 690 publications were screened. A total of 164 studies were eligible for full-text assessment; 58 studies satisfied the inclusion criteria. Overall, 8 of these studies included older adults in the research. The paper selection flowchart is given in <xref rid="figure1" ref-type="fig">Figure 1</xref>.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>Paper selection flowchart based on PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) guidelines. ACM DL: Association for Computing Machinery Digital Library; ADL: activity of daily living; BADL: basic activity of daily living.</p>
          </caption>
          <graphic xlink:href="jmir_v27i1e67373_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>We summarize the state-of-the-art for the recognition of BADLs in the following subsections (<xref ref-type="table" rid="table4">Table 4</xref>). We then highlight the papers that included older adults (<xref ref-type="table" rid="table5">Table 5</xref>).</p>
        <p>Most studies use commodity devices such as smartphones and smartwatches for motion sensors and microphones regardless of which ADLs were targeted. Using these devices maximizes the solution’s potential for adoption as these devices are ubiquitous. Research using smartphones assumes that the phone will either be in the user's pocket or near them. Some studies strap the smartphone to a person’s arm to simulate a watch-like form factor [<xref ref-type="bibr" rid="ref44">44</xref>] or to keep the device near the user in an unobtrusive manner [<xref ref-type="bibr" rid="ref45">45</xref>]. Some studies still leverage custom sensor arrays, for example, the Opportunity dataset where participants had accelerometers placed all over the body [<xref ref-type="bibr" rid="ref46">46</xref>] and the study by Bedri et al [<xref ref-type="bibr" rid="ref47">47</xref>], which placed sensors on a pair of glasses, on the ear, and on the back of the neck.</p>
        <table-wrap position="float" id="table4">
          <label>Table 4</label>
          <caption>
            <p>Activity of daily living recognition studies grouped by activities of daily living.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="330"/>
            <col width="320"/>
            <col width="320"/>
            <thead>
              <tr valign="bottom">
                <td colspan="2">BADL<sup>a</sup> and categories</td>
                <td>Relevant studies</td>
                <td>Number of studies</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="4">Bathing</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Bathing</td>
                <td>[<xref ref-type="bibr" rid="ref45">45</xref>]</td>
                <td>1</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Hygiene</td>
                <td>[<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref44">44</xref>,<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref48">48</xref>-<xref ref-type="bibr" rid="ref64">64</xref>]</td>
                <td>24</td>
              </tr>
              <tr valign="top">
                <td colspan="4">Dressing</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Dressing</td>
                <td>[<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref64">64</xref>]</td>
                <td>2</td>
              </tr>
              <tr valign="top">
                <td colspan="4">Toileting and Continence</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Toileting</td>
                <td>[<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref65">65</xref>]</td>
                <td>3</td>
              </tr>
              <tr valign="top">
                <td colspan="4">Transferring</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Transitions</td>
                <td>[<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref24">24</xref>-<xref ref-type="bibr" rid="ref29">29</xref>,<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref66">66</xref>-<xref ref-type="bibr" rid="ref68">68</xref>]</td>
                <td>13</td>
              </tr>
              <tr valign="top">
                <td colspan="4">Feeding</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Eating</td>
                <td>[<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref57">57</xref>,<xref ref-type="bibr" rid="ref58">58</xref>,<xref ref-type="bibr" rid="ref60">60</xref>,<xref ref-type="bibr" rid="ref62">62</xref>-<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref69">69</xref>-<xref ref-type="bibr" rid="ref83">83</xref>]</td>
                <td>27</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Drinking</td>
                <td>[<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref58">58</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref62">62</xref>-<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref67">67</xref>,<xref ref-type="bibr" rid="ref69">69</xref>,<xref ref-type="bibr" rid="ref74">74</xref>,<xref ref-type="bibr" rid="ref77">77</xref>,<xref ref-type="bibr" rid="ref83">83</xref>-<xref ref-type="bibr" rid="ref87">87</xref>]</td>
                <td>20</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table4fn1">
              <p><sup>a</sup>BADL: basic activity of daily living.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
      <sec>
        <title>Bathing and Hygiene</title>
        <p>Bathing and showering behaviors are rare inclusions for ADL recognition systems. Previous surveys did not capture any papers with these activities [<xref ref-type="bibr" rid="ref37">37</xref>,<xref ref-type="bibr" rid="ref41">41</xref>]. Liang et al [<xref ref-type="bibr" rid="ref45">45</xref>] used an audio-based approach to detect sounds associated with bathtubs and showers, for example, filling with water or washing. The lack of focus on these activities has several likely reasons. First, these activities are difficult to simulate in laboratory settings due to their facility requirements. Second, wearable sensors need to be sufficiently waterproof or protected during these activities. Finally, people are more likely to be uncomfortable performing these activities while being recorded. The protocol in the study by Liang et al [<xref ref-type="bibr" rid="ref45">45</xref>] avoided these issues. They conducted a free-living study where the researcher followed the participant around their own home, and the researcher kept a distance to minimize any influence on how activities were performed.</p>
        <p>Many hygiene activities primarily involve the hands and can be captured with wrist-worn devices such as smartwatches. The most commonly included activities are brushing teeth, combing hair, and washing hands. Each of these activities tends to be performed for a sustained amount of time and has rhythmic attributes in their motions, making them distinct from other daily activities. Furthermore, these activities have low expected variation in their performance, making it easier for an ADL recognition model to generalize. These activities are usually included in a large set of general ADLs (ie, the work is not specifically focused on bathroom or hygiene activities). Examples of this practice include studies by Bhattacharya et al [<xref ref-type="bibr" rid="ref10">10</xref>] and Cherian et al [<xref ref-type="bibr" rid="ref22">22</xref>] that recognized 23 and 8 different activities, respectively, and covered a range of everyday activities. Exceptions to this pattern include Akther et al [<xref ref-type="bibr" rid="ref17">17</xref>] who focused on assessing how thoroughly the user brushed their teeth and Mondol et al [<xref ref-type="bibr" rid="ref51">51</xref>] and Santos-Gago et al [<xref ref-type="bibr" rid="ref53">53</xref>] whose work focused on identifying hand-washing behaviors that are compliant with the World Health Organization (WHO) guidelines.</p>
      </sec>
      <sec>
        <title>Dressing</title>
        <p>Dressing is an uncommon activity to be included within the constraints of this review. Motion sensors such as accelerometers are the most common type of sensor used in wearable systems. However, the motions associated with dressing activities tend to be subtle, making them difficult to distinguish from other everyday activities. Furthermore, the diversity in the styles of clothing people wear makes designing a general recognition system complicated. Sun et al [<xref ref-type="bibr" rid="ref15">15</xref>] included “putting on clothes” and “taking off clothes” among the total of 221 activities included in their work focused on developing a multimodal activity recognition system. This work found that both motion and Wi-Fi signals were useful in determining if the user was interacting with clothing. Dressing in Narkhede et al [<xref ref-type="bibr" rid="ref64">64</xref>] used both motion and location data and commented that location context was necessary due to the high variability in the motion data.</p>
      </sec>
      <sec>
        <title>Toileting</title>
        <p>Toileting behaviors are generally detected via a proxy indicator. Using a toilet or urinal does not involve significant bodily motion; therefore, common locations for wearable sensors will not detect these activities. However, this activity is normally followed by flushing the toilet or urinal, as this sound is used to signal the end of the activity. Liang et al [<xref ref-type="bibr" rid="ref45">45</xref>] uses only sound, Masum et al [<xref ref-type="bibr" rid="ref65">65</xref>] uses only motion, and Mollyn et al [<xref ref-type="bibr" rid="ref59">59</xref>] uses both. Liang et al [<xref ref-type="bibr" rid="ref45">45</xref>] and Mollyn et al [<xref ref-type="bibr" rid="ref59">59</xref>] specify that their systems recognize the flushing action while Masum et al [<xref ref-type="bibr" rid="ref65">65</xref>] focuses on the action of sitting on the toilet.</p>
      </sec>
      <sec>
        <title>Transferring</title>
        <p>Ambulation activities are widely studied in activity recognition research.</p>
        <p>Many studies developed methods for distinguishing between different modes of ambulation and posture such as walking, jogging, ascending or descending stairs, sitting, standing, and lying down [<xref ref-type="bibr" rid="ref88">88</xref>-<xref ref-type="bibr" rid="ref93">93</xref>]. However, most of these studies focus on just detecting the current state of the user and do not directly capture the transitions between modes (eg, sit-to-stand, stand-to-sit, lying-to-stand, or stand-to-lying). The datasets UniMiB-SHAR [<xref ref-type="bibr" rid="ref32">32</xref>], MobiAct [<xref ref-type="bibr" rid="ref30">30</xref>], and Transitional Activities [<xref ref-type="bibr" rid="ref94">94</xref>] do include these annotations, and many recent deep learning–centric studies have leveraged these datasets to detect these activities [<xref ref-type="bibr" rid="ref11">11</xref>,<xref ref-type="bibr" rid="ref24">24</xref>-<xref ref-type="bibr" rid="ref29">29</xref>]. The first 2 datasets involve a smartphone placed in the front pocket of the user’s pants while the last dataset places sensor nodes across the body on the waist, right wrist, left wrist, right arm, left thigh, and right ankle.</p>
      </sec>
      <sec>
        <title>Eating and Drinking</title>
        <p>Eating and drinking have the most diversity in the sensors used to recognize these activities. While they can be recognized with wrist-worn sensors [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref77">77</xref>], these activities also afford opportunities to place sensors on the head or neck [<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref70">70</xref>,<xref ref-type="bibr" rid="ref85">85</xref>]. Bedri et al [<xref ref-type="bibr" rid="ref70">70</xref>] used a glasses form factor to detect episodes of eating and drinking. This approach allows the system to avoid ambiguity with other hand-centric activities and focus on detecting actions such as chewing and swallowing. Some studies will use the high-level labels in the Opportunity dataset [<xref ref-type="bibr" rid="ref46">46</xref>] to recognize examples of eating and drinking. Opportunity contains kitchen-centric activities where the participant goes through a gauntlet of activities including opening and closing cabinets, opening and closing a refrigerator door, making a sandwich, and making coffee. This dataset has low-level annotations focused on the specific motions, for example, “open drawer” and high-level annotations focused on the activity being performed, such as “sandwich time.” Some studies only focus on recognizing the 17 gestures in an activity-agnostic fashion, but others use high-level annotations to recognize actions such as taking a bite of a sandwich or taking a sip of coffee [<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref58">58</xref>,<xref ref-type="bibr" rid="ref74">74</xref>,<xref ref-type="bibr" rid="ref83">83</xref>].</p>
      </sec>
      <sec>
        <title>Studies With Older Adults</title>
        <p>We highlight studies that include older adults in the main research (<xref ref-type="table" rid="table5">Table 5</xref>). Only 5 of these studies have the design and evaluation of activity recognition systems as their main contribution. The other 3 studies are highly connected to ADL recognition using wearable sensors, meriting their inclusion.</p>
        <p>Alam et al [<xref ref-type="bibr" rid="ref36">36</xref>] represents the concern that systems trained on younger populations will not generalize to older populations when deployed. Their work focuses on mitigating biases in their developed ADL recognition system to be robust to differences in physical ability. The main evaluation is a gesture recognition system to distinguish between 8 hand gestures and a walking recognition system that is robust to the user having a mobility aid such as a walker.</p>
        <table-wrap position="float" id="table5">
          <label>Table 5</label>
          <caption>
            <p>Studies that include older adults.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="360"/>
            <col width="0"/>
            <col width="250"/>
            <col width="0"/>
            <col width="360"/>
            <thead>
              <tr valign="top">
                <td colspan="3">Study type and references</td>
                <td colspan="2">Year</td>
                <td>Focus</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="6">Activity recognition</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Wellnitz et al [<xref ref-type="bibr" rid="ref85">85</xref>]</td>
                <td colspan="2">2020</td>
                <td colspan="2">Drinking recognition</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Alam et al [<xref ref-type="bibr" rid="ref36">36</xref>]</td>
                <td colspan="2">2021</td>
                <td colspan="2">Bias mitigation</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Cao et al [<xref ref-type="bibr" rid="ref56">56</xref>]</td>
                <td colspan="2">2022</td>
                <td colspan="2">Handwashing recognition</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Cook et al [<xref ref-type="bibr" rid="ref57">57</xref>]</td>
                <td colspan="2">2022</td>
                <td colspan="2">Brain health intervention</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Alevizaki et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td>
                <td colspan="2">2023</td>
                <td colspan="2">System design</td>
              </tr>
              <tr valign="top">
                <td colspan="6">Human-computer interaction</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Kim et al [<xref ref-type="bibr" rid="ref95">95</xref>]</td>
                <td colspan="2">2022</td>
                <td colspan="2">In-situ data annotation system</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Caldeira et al [<xref ref-type="bibr" rid="ref96">96</xref>]</td>
                <td colspan="2">2023</td>
                <td colspan="2">User experience in smart home</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Cherian et al [<xref ref-type="bibr" rid="ref97">97</xref>]</td>
                <td colspan="2">2024</td>
                <td colspan="2">Acceptability of ADL monitoring system</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
        <p>Cao et al [<xref ref-type="bibr" rid="ref56">56</xref>] focuses on recognizing the activity of washing hands in older adults with dementia in a user-independent fashion. Their system identifies specific steps in the handwashing process to identify if the patient needs assistance in properly washing their hands. Their evaluation included 8 older adults with cognitive impairment as tested by the Montreal Cognitive Assessment (MoCA) [<xref ref-type="bibr" rid="ref98">98</xref>].</p>
        <p>Cook et al [<xref ref-type="bibr" rid="ref57">57</xref>] leverages an ADL recognition system to label the participants’ behaviors in free-living conditions. The goal of this work was to distinguish between brain health intervention and nonintervention participants. The behavior predictions were used as input to the brain health intervention versus nonintervention classifier.</p>
        <p>Kim et al [<xref ref-type="bibr" rid="ref95">95</xref>] created a speech-based smartwatch application to gather in-situ annotations of daily activities. The design of the system focused on the needs and comfort of older adults to minimize the burden associated with data annotation. They conducted a user study over the course of 7 days with 13 older adults to evaluate the experience of using such a system. An envisioned goal for this system is facilitating personalized activity recognition.</p>
        <p>Caldeira et al [<xref ref-type="bibr" rid="ref96">96</xref>] and Cherian et al [<xref ref-type="bibr" rid="ref97">97</xref>] include perspectives from older adults regarding the usage of monitoring technology for ADL performance. Caldeira et al [<xref ref-type="bibr" rid="ref96">96</xref>] interviewed participants after living in a smart home and wearing a smartwatch for 2.5 years. They found that participants wanted to be included and to engage with their data, especially with respect to determining if they were living an active lifestyle. Cherian et al [<xref ref-type="bibr" rid="ref97">97</xref>] interviewed participants who lived in the assisted living section of a continuing care retirement community before and after wearing smartwatches for 1 week to simulate an ADL monitoring system. Participants acknowledged the potential utility of such a system and voiced a desire to maintain their independence.</p>
        <p>Wellnitz et al [<xref ref-type="bibr" rid="ref85">85</xref>] and Alevizaki et al [<xref ref-type="bibr" rid="ref61">61</xref>] discuss that their systems have utility for ADL tracking for older adults and include at least one older adult in their data collection and system evaluation. However, the majority of their participants are younger adults, and including older adults was not an explicit goal.</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Results</title>
        <p>BADLs are not equally covered in the ADL recognition literature. Few studies attempt to recognize bathing, dressing, and toileting. These activities have attributes that make them more difficult to detect as stated in the Results section. Bathing and toileting relied on sound cues, and dressing relied on location context. By contrast, the other BADLs were recognizable using motion sensors, which are ubiquitous in commercially available wearable devices. The special facility requirements and concerns for subject privacy of bathing, toileting, and dressing may have caused them to be deprioritized in the ADL recognition literature because systems that recognize many activities (ie, multiclass recognition) are a current focus. However, recognizing bathing, dressing, and toileting has value in health monitoring applications as they give medical practitioners insight into their patients’ health, for example, whether the patient is maintaining regular bathroom habits. Robustly detecting these activities is an open problem for ADL recognition researchers to address as the field progresses.</p>
        <p>All the papers collected in this scoping review discuss older adults at some point in their work by the nature of our query. However, our results show that only 14% (8/58) of studies in this scoping review include older adults in the design and evaluation of their systems. The main reason for this disconnect is that many publicly available datasets for ADL recognition include only younger adults [<xref ref-type="bibr" rid="ref30">30</xref>-<xref ref-type="bibr" rid="ref33">33</xref>,<xref ref-type="bibr" rid="ref46">46</xref>]. Many techniques in artificial intelligence and machine learning require annotated datasets, and researchers are strongly encouraged to use existing datasets to benchmark their contributions and increase the reproducibility of their work. Including older adults requires conducting a custom user study and annotating the data, which is time-consuming and costly. Evaluating the system with the target audience is not a necessary condition for the completion of studies that focus on developing new systems or techniques, for example, exploring self-supervised learning. This type of research consequently remains relatively unexplored, creating a knowledge gap in how well systems trained on younger populations generalize to older populations.</p>
      </sec>
      <sec>
        <title>Comparison to Prior Work</title>
        <p>Activity recognition research is united by common goals. Techniques and approaches vary greatly across works, making surveys valuable summaries of the myriad of explored solutions. The most related scoping reviews to this work are the Camp et al survey of tools for ADL tracking for community-dwelling older adults [<xref ref-type="bibr" rid="ref37">37</xref>] and the Zhang et al survey of recognition of bathroom activities with wearable devices [<xref ref-type="bibr" rid="ref41">41</xref>]. Camp et al did not focus on the sensing medium (ie, wearable versus ambient) but instead centered on commercially available devices. The study scope of Zhang et al was limited to only bathroom activities which are understudied as supported by this work. The current contribution is distinct by focusing on summarizing the state-of-the-art research approaches for all BADLs.</p>
      </sec>
      <sec>
        <title>Limitations and Future Directions</title>
        <p>One of the goals of this work was to summarize the current practices in the design of ADL recognition systems for BADLs. A limitation of the search strategy used in this review is the inability to establish a performance benchmark for future research. We summarize the common techniques and considerations the studies contribute, but the performance evaluations of their solutions were not captured due to the heterogeneity of the data.</p>
        <p>Another limitation is that our approach does not provide concrete insights into how to incorporate wearable ADL tracking systems into aging-in-place solutions. The studies in this survey predominantly focus on proving system feasibility, not usability. Understanding what older adults, their caregivers, and their loved ones desire in an ADL tracking system for supporting aging in place is an avenue for future research.</p>
        <p>Our work focused on ADL tracking systems for BADLs to support aging in place. Future surveys should consider systems for recognizing instrumental ADLs or other solutions for supporting aging in place. Potential avenues include intelligent systems for home automation and social robotics for assistance in performing ADLs.</p>
        <p>Most systems in activity recognition are evaluated in terms of correctly identifying the activity performed. However, this problem definition ignores the additional information about how the activity was performed (ie, whether it was performed correctly, adequately, or even abnormally). Systems that can detect and assess activity performance have more utility for caregivers and family members, providing more insight into the individual's health status. Specifically, knowing the quality of the activity performance can be used to detect declines in health and signal the need for an intervention or increase in needed care. Designing ADL recognition systems that can detect degradation in ADL performance is a potential avenue for future work.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>Over the coming decades, the population of older adults is expected to increase significantly, a trend that will put tremendous strain on health care systems around the world. Making it possible for people to stay in their homes longer safely (ie, aging in place) has great potential clinical implications. One solution to support aging in place is using human activity recognition systems to automatically track ADL performance, providing a safety net that can detect significant changes in ADL performance.</p>
        <p>Wearable ADL recognition has promise for enabling these aging-in-place systems, but the current literature has several gaps to be addressed before this option becomes feasible. Several basic ADLs (eg, bathing, dressing, and toileting) have little coverage and remain open problems for ADL recognition. Additionally, many works are predominantly concerned with proving system feasibility and do not assess usability or real-world deployment. For these systems to move from academic experiments to actual systems with clinical utility, ADL recognition systems must consider the design requirements of being part of remote health monitoring systems.</p>
        <p>In this survey, we reviewed human activity recognition systems designed to recognize basic ADLs using wearable sensors. Despite targeting older adults as users, many studies do not directly include this population in their research. To address this gap, ADL recognition researchers are encouraged to evaluate their systems with older adults as participants to assess how their systems would work in a real-world deployment.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>PRISMA-ScR Checklist.</p>
        <media xlink:href="jmir_v27i1e67373_app1.pdf" xlink:title="PDF File  (Adobe PDF File), 2528 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">ACM DL</term>
          <def>
            <p>Association of Computing Machinery Digital Library</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">ADL</term>
          <def>
            <p>activity of daily living</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">BADL</term>
          <def>
            <p>basic activity of daily living</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">MoCA</term>
          <def>
            <p>Montreal Cognitive Assessment</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">PRISMA</term>
          <def>
            <p>Preferred Reporting Items for Systematic reviews and Meta-Analyses</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">PRISMA-ScR</term>
          <def>
            <p>Preferred Reporting Items for Systematic reviews and Meta-Analyses extension for Scoping Reviews</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">WHO</term>
          <def>
            <p>World Health Organization</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This research was funded by the National Science Foundation (grant 1952236) and by Texas A&#38;M Health.</p>
    </ack>
    <fn-group>
      <fn fn-type="con">
        <p>JAC and TAH contributed to conceptualization. SJR, JAC, and PKS handled methodology. SJR assisted with investigation. SJR and JAC contributed to writing – original draft. PKS handled writing – review &#38; editing. TAH and PKS assisted with supervision. AML handled project administration.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="book">
          <source>World Population Prospects: The 2017 Revision</source>
          <year>2017</year>
          <publisher-loc>New York City, United States</publisher-loc>
          <publisher-name>United Nations Department of Economic and Social Affairs</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="book">
          <source>World Population Ageing</source>
          <year>2015</year>
          <publisher-loc>New York City, United States</publisher-loc>
          <publisher-name>United Nations Department of Economic and Social Affairs</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Haufe</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Peek</surname>
              <given-names>STM</given-names>
            </name>
            <name name-style="western">
              <surname>Luijkx</surname>
              <given-names>KG</given-names>
            </name>
          </person-group>
          <article-title>Matching gerontechnologies to independent-living seniors' individual needs: development of the GTM tool</article-title>
          <source>BMC Health Serv Res</source>
          <year>2019</year>
          <volume>19</volume>
          <issue>1</issue>
          <fpage>26</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bmchealthservres.biomedcentral.com/articles/10.1186/s12913-018-3848-5"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12913-018-3848-5</pub-id>
          <pub-id pub-id-type="medline">30634971</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12913-018-3848-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC6329159</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Costenoble</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Knoop</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Vermeiren</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Vella</surname>
              <given-names>RA</given-names>
            </name>
            <name name-style="western">
              <surname>Debain</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Rossi</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bautmans</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Verté</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Gorus</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>De Vriendt</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>A comprehensive overview of activities of daily living in existing frailty instruments: a systematic literature search</article-title>
          <source>Gerontologist</source>
          <year>2021</year>
          <volume>61</volume>
          <issue>3</issue>
          <fpage>e12</fpage>
          <lpage>e22</lpage>
          <pub-id pub-id-type="doi">10.1093/geront/gnz147</pub-id>
          <pub-id pub-id-type="medline">31872238</pub-id>
          <pub-id pub-id-type="pii">5679755</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Katz</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Assessing self-maintenance: activities of daily living, mobility, and instrumental activities of daily living</article-title>
          <source>J Am Geriatr Soc</source>
          <year>1983</year>
          <volume>31</volume>
          <issue>12</issue>
          <fpage>721</fpage>
          <lpage>727</lpage>
          <pub-id pub-id-type="doi">10.1111/j.1532-5415.1983.tb03391.x</pub-id>
          <pub-id pub-id-type="medline">6418786</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Edemekong</surname>
              <given-names>PF</given-names>
            </name>
            <name name-style="western">
              <surname>Bomgaars</surname>
              <given-names>DL</given-names>
            </name>
            <name name-style="western">
              <surname>Sukumaran</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Schoo</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <source>Activities of Daily Living</source>
          <year>2023</year>
          <publisher-loc>Treasure Island (FL)</publisher-loc>
          <publisher-name>StatPearls. StatPearls Publishing</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Amini</surname>
              <given-names>DA</given-names>
            </name>
            <name name-style="western">
              <surname>Kannenberg</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Bodison</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Chang</surname>
              <given-names>PFJ</given-names>
            </name>
          </person-group>
          <article-title>Occupational therapy practice framework: domain and process (3rd edition)</article-title>
          <source>Am J Occup Ther</source>
          <year>2014</year>
          <volume>68</volume>
          <issue>Supplement_1</issue>
          <fpage>S1</fpage>
          <lpage>S48</lpage>
          <pub-id pub-id-type="doi">10.5014/ajot.2014.68s1</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Boockvar</surname>
              <given-names>KS</given-names>
            </name>
            <name name-style="western">
              <surname>Lachs</surname>
              <given-names>MS</given-names>
            </name>
          </person-group>
          <article-title>Predictive value of nonspecific symptoms for acute illness in nursing home residents</article-title>
          <source>J Am Geriatr Soc</source>
          <year>2003</year>
          <volume>51</volume>
          <issue>8</issue>
          <fpage>1111</fpage>
          <lpage>1115</lpage>
          <pub-id pub-id-type="doi">10.1046/j.1532-5415.2003.51360.x</pub-id>
          <pub-id pub-id-type="medline">12890074</pub-id>
          <pub-id pub-id-type="pii">jgs51360</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Laput</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Harrison</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>Sensing fine-grained hand activity with smartwatches</article-title>
          <year>2019</year>
          <conf-name>Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems. ACM</conf-name>
          <conf-date>May 4-9, 2019</conf-date>
          <conf-loc>Glasgow, Scotland</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3290605.3300568</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bhattacharya</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Adaimi</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Thomaz</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Leveraging sound and wrist motion to detect activities of daily living with commodity smartwatches</article-title>
          <year>2022</year>
          <conf-name>Proc ACM Interact Mob Wearable Ubiquitous Technol</conf-name>
          <conf-date>July 7, 2022</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>28</lpage>
          <pub-id pub-id-type="doi">10.1145/3534582</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Haresamudram</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Essa</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Plötz</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Assessing the state of self-supervised human activity recognition using wearables</article-title>
          <year>2022</year>
          <conf-name>Proc ACM Interact Mob Wearable Ubiquitous Technol</conf-name>
          <conf-date>September 2, 2022</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>47</lpage>
          <pub-id pub-id-type="doi">10.1145/3550299</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bao</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Intille</surname>
              <given-names>SS</given-names>
            </name>
          </person-group>
          <source>Activity recognition from user-annotated acceleration data</source>
          <year>2004</year>
          <publisher-loc>Berlin, Heidelberg</publisher-loc>
          <publisher-name>Pervasive Computing. Springer</publisher-name>
          <fpage>1</fpage>
          <lpage>17</lpage>
          <pub-id pub-id-type="doi">10.1007/978-3-540-24646-6_1</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kao</surname>
              <given-names>TP</given-names>
            </name>
            <name name-style="western">
              <surname>Lin</surname>
              <given-names>CW</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>JS</given-names>
            </name>
          </person-group>
          <article-title>Development of a portable activity detector for daily activity recognition</article-title>
          <year>2009</year>
          <conf-name>IEEE International Symposium on Industrial Electronics</conf-name>
          <conf-date>July 5-8, 2009</conf-date>
          <conf-loc>Seoul, Republic of Korea</conf-loc>
          <fpage>115</fpage>
          <lpage>120</lpage>
          <pub-id pub-id-type="doi">10.1109/isie.2009.5222001</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shoaib</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Bosch</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Incel</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Scholten</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Havinga</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Complex human activity recognition using smartphone and wrist-worn motion sensors</article-title>
          <source>Sensors (Basel)</source>
          <year>2016</year>
          <volume>16</volume>
          <issue>4</issue>
          <fpage>426</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s16040426"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s16040426</pub-id>
          <pub-id pub-id-type="medline">27023543</pub-id>
          <pub-id pub-id-type="pii">s16040426</pub-id>
          <pub-id pub-id-type="pmcid">PMC4850940</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Xia</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>CJ</given-names>
            </name>
          </person-group>
          <article-title>Multimodal daily-life logging in free-living environment using non-visual egocentric sensors on a smartphone</article-title>
          <year>2024</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>March 06, 2024</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>32</lpage>
          <pub-id pub-id-type="doi">10.1145/3643553</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cherian</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Rajanna</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Goldberg</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Hammond</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Did you remember to brush?: a noninvasive wearable approach to recognizing brushing teeth for elderly care</article-title>
          <year>2017</year>
          <conf-name>PervasiveHealth '17: Proceedings of the 11th EAI International Conference on Pervasive Computing Technologies for Healthcare</conf-name>
          <conf-date>May 23-26, 2017</conf-date>
          <conf-loc>Barcelona, Spain</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3154862.3154866</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Akther</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Saleheen</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Saha</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Shetty</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Kumar</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>mTeeth: identifying brushing teeth surfaces using wrist-worn inertial sensors</article-title>
          <year>2021</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>June 24, 2021</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>25</lpage>
          <pub-id pub-id-type="doi">10.1145/3463494</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kalantarian</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Alshurafa</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Sarrafzadeh</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Detection of gestures associated with medication adherence using smartwatch-based inertial sensors</article-title>
          <source>IEEE Sens J</source>
          <year>2016</year>
          <volume>16</volume>
          <issue>4</issue>
          <fpage>1054</fpage>
          <lpage>1061</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/36452935"/>
          </comment>
          <pub-id pub-id-type="doi">10.1109/jsen.2015.2497279</pub-id>
          <pub-id pub-id-type="medline">36452935</pub-id>
          <pub-id pub-id-type="pmcid">PMC9708062</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cherian</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ray</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hammond</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>An activity recognition system for taking medicine using in-the-wild data to promote medication adherence</article-title>
          <year>2021</year>
          <conf-name>IUI '21: Proceedings of the 26th International Conference on Intelligent User Interfaces</conf-name>
          <conf-date>April 14-17, 2021</conf-date>
          <conf-loc>College Station, TX, USA</conf-loc>
          <fpage>26</fpage>
          <pub-id pub-id-type="doi">10.1145/3397481.3450673</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Galluzzi</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Herman</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Polgreen</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Hand hygiene duration and technique recognition using wrist-worn sensors</article-title>
          <year>2015</year>
          <conf-name>IPSN '15: Proceedings of the 14th International Conference on Information Processing in Sensor Networks</conf-name>
          <conf-date>April 13-16, 2015</conf-date>
          <conf-loc>Seattle, Washington</conf-loc>
          <fpage>106</fpage>
          <lpage>117</lpage>
          <pub-id pub-id-type="doi">10.1145/2737095.2737106</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Das</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Nishimura</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Vivek</surname>
              <given-names>RP</given-names>
            </name>
            <name name-style="western">
              <surname>Takeda</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Fish</surname>
              <given-names>ST</given-names>
            </name>
            <name name-style="western">
              <surname>Plötz</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Chernova</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Explainable activity recognition for smart home systems</article-title>
          <source>ACM Trans. Interact. Intell. Syst</source>
          <year>2023</year>
          <volume>13</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>39</lpage>
          <pub-id pub-id-type="doi">10.1145/3561533</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cherian</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ray</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Taele</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Koh</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Hammond</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Exploring the impact of the NULL class on in-the-wild human activity recognition</article-title>
          <source>Sensors (Basel)</source>
          <year>2024</year>
          <volume>24</volume>
          <issue>12</issue>
          <fpage>3898</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s24123898"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s24123898</pub-id>
          <pub-id pub-id-type="medline">38931682</pub-id>
          <pub-id pub-id-type="pii">s24123898</pub-id>
          <pub-id pub-id-type="pmcid">PMC11207638</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Abedin</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ehsanpour</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Shi</surname>
              <given-names>Q</given-names>
            </name>
            <name name-style="western">
              <surname>Rezatofighi</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Ranasinghe</surname>
              <given-names>DC</given-names>
            </name>
          </person-group>
          <article-title>Attend and discriminate: Beyond the state-of-the-art for human activity recognition using wearable sensors</article-title>
          <year>2021</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>March 30, 2021</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>22</lpage>
          <pub-id pub-id-type="doi">10.1145/3448083</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rokni</surname>
              <given-names>SA</given-names>
            </name>
            <name name-style="western">
              <surname>Ghasemzadeh</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Share-n-learn: a framework for sharing activity recognition models in wearable systems with context-varying sensors</article-title>
          <source>ACM Trans. Des. Autom. Electron. Syst</source>
          <year>2019</year>
          <volume>24</volume>
          <issue>4</issue>
          <fpage>1</fpage>
          <lpage>27</lpage>
          <pub-id pub-id-type="doi">10.1145/3318044</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Saeed</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ozcelebi</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Lukkien</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Multi-task self-supervised learning for human activity detection</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2019</year>
          <volume>3</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>30</lpage>
          <pub-id pub-id-type="doi">10.1145/3328932</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Haresamudram</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Essa</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Plötz</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Contrastive predictive coding for human activity recognition</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2021</year>
          <volume>5</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>26</lpage>
          <pub-id pub-id-type="doi">10.1145/3463506</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lu</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Pan</surname>
              <given-names>SJ</given-names>
            </name>
            <name name-style="western">
              <surname>Hu</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Qin</surname>
              <given-names>X</given-names>
            </name>
          </person-group>
          <article-title>Semantic-discriminative mixup for generalizable sensor-based cross-domain activity recognition</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2022</year>
          <volume>6</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>19</lpage>
          <pub-id pub-id-type="doi">10.1145/3534589</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Tian</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Zhou</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Guo</surname>
              <given-names>Y</given-names>
            </name>
          </person-group>
          <article-title>IF-ConvTransformer: a framework for human activity recognition using IMU fusion and ConvTransformer</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2022</year>
          <volume>6</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>26</lpage>
          <pub-id pub-id-type="doi">10.1145/3534584</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kang</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Hu</surname>
              <given-names>Q</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>Q</given-names>
            </name>
          </person-group>
          <article-title>SF-Adapter: Computational-efficient source-free domain adaptation for human activity recognition</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2024</year>
          <volume>7</volume>
          <issue>4</issue>
          <fpage>1</fpage>
          <lpage>23</lpage>
          <pub-id pub-id-type="doi">10.1145/3631428</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chatzaki</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Pediaditis</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Vavoulas</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Tsiknakis</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Röcker</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>O'Donoghue</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ziefle</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Helfert</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Molloy</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Human daily activity and fall recognition using a smartphone’s acceleration sensor</article-title>
          <source>Information and Communication Technologies for Ageing Well and E-Health</source>
          <year>2017</year>
          <publisher-loc>Cham</publisher-loc>
          <publisher-name>Springer</publisher-name>
          <fpage>100</fpage>
          <lpage>118</lpage>
          <pub-id pub-id-type="doi">10.1007/978-3-319-62704-5_7</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Reiss</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Stricker</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Introducing a new benchmarked dataset for activity monitoring</article-title>
          <year>2012</year>
          <conf-name>16th International Symposium on Wearable Computers</conf-name>
          <conf-date>June 18-22, 2012</conf-date>
          <conf-loc>Newcastle, UK</conf-loc>
          <fpage>108</fpage>
          <lpage>109</lpage>
          <pub-id pub-id-type="doi">10.1109/ISWC.2012.13</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Micucci</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Mobilio</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Napoletano</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>UniMiB SHAR: A dataset for human activity recognition using acceleration data from smartphones</article-title>
          <source>Appl Sci</source>
          <year>2017</year>
          <volume>7</volume>
          <issue>10</issue>
          <fpage>1101</fpage>
          <pub-id pub-id-type="doi">10.3390/app7101101</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Weiss</surname>
              <given-names>GM</given-names>
            </name>
            <name name-style="western">
              <surname>Yoneda</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Hayajneh</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Smartphone and smartwatch-based biometrics using activities of daily living</article-title>
          <source>IEEE Access</source>
          <year>2019</year>
          <volume>7</volume>
          <fpage>133190</fpage>
          <lpage>133202</lpage>
          <pub-id pub-id-type="doi">10.1109/access.2019.2940729</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sucerquia</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>López</surname>
              <given-names>JD</given-names>
            </name>
            <name name-style="western">
              <surname>Vargas-Bonilla</surname>
              <given-names>JF</given-names>
            </name>
          </person-group>
          <article-title>SisFall: a fall and movement dataset</article-title>
          <source>Sensors (Basel)</source>
          <year>2017</year>
          <volume>17</volume>
          <issue>1</issue>
          <fpage>198</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s17010198"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s17010198</pub-id>
          <pub-id pub-id-type="medline">28117691</pub-id>
          <pub-id pub-id-type="pii">s17010198</pub-id>
          <pub-id pub-id-type="pmcid">PMC5298771</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mehrabi</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Morstatter</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Saxena</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Lerman</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Galstyan</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>A survey on bias and fairness in machine learning</article-title>
          <source>ACM Comput. Surv</source>
          <year>2021</year>
          <volume>54</volume>
          <issue>6</issue>
          <fpage>1</fpage>
          <lpage>35</lpage>
          <pub-id pub-id-type="doi">10.1145/3457607</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Alam</surname>
              <given-names>MAU</given-names>
            </name>
          </person-group>
          <article-title>AI-Fairness towards activity recognition of older adults</article-title>
          <year>2021</year>
          <conf-name>MobiQuitous '20: MobiQuitous 2020 - 17th EAI International Conference on Mobile and Ubiquitous Systems: Computing, Networking and Services</conf-name>
          <conf-date>August 9, 2021</conf-date>
          <conf-loc>Darmstadt, Germany</conf-loc>
          <fpage>108</fpage>
          <lpage>117</lpage>
          <pub-id pub-id-type="doi">10.1145/3448891.3448943</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Camp</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Lewis</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Hunter</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Johnston</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Zecca</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Di Nuovo</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Magistro</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Technology used to recognize activities of daily living in community-dwelling older adults</article-title>
          <source>Int J Environ Res Public Health</source>
          <year>2020</year>
          <volume>18</volume>
          <issue>1</issue>
          <fpage>163</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=ijerph18010163"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/ijerph18010163</pub-id>
          <pub-id pub-id-type="medline">33379319</pub-id>
          <pub-id pub-id-type="pii">ijerph18010163</pub-id>
          <pub-id pub-id-type="pmcid">PMC7795436</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Morita</surname>
              <given-names>PP</given-names>
            </name>
            <name name-style="western">
              <surname>Sahu</surname>
              <given-names>KS</given-names>
            </name>
            <name name-style="western">
              <surname>Oetomo</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Health monitoring using smart home technologies: scoping review</article-title>
          <source>JMIR Mhealth Uhealth</source>
          <year>2023</year>
          <volume>11</volume>
          <fpage>e37347</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://mhealth.jmir.org/2023/1/e37347/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/37347</pub-id>
          <pub-id pub-id-type="medline">37052984</pub-id>
          <pub-id pub-id-type="pii">v11i1e37347</pub-id>
          <pub-id pub-id-type="pmcid">PMC10141305</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Facchinetti</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Petrucci</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Albanesi</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>De Marinis</surname>
              <given-names>MG</given-names>
            </name>
            <name name-style="western">
              <surname>Piredda</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Can smart home technologies help older adults manage their chronic condition? A systematic literature review</article-title>
          <source>Int J Environ Res Public Health</source>
          <year>2023</year>
          <volume>20</volume>
          <issue>2</issue>
          <fpage>1205</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://hdl.handle.net/2318/1887928"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/ijerph20021205</pub-id>
          <pub-id pub-id-type="medline">36673957</pub-id>
          <pub-id pub-id-type="pii">ijerph20021205</pub-id>
          <pub-id pub-id-type="pmcid">PMC9859495</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tannou</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Lihoreau</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Couture</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Giroux</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>RH</given-names>
            </name>
            <name name-style="western">
              <surname>Spalla</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Zarshenas</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Gagnon-Roy</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Aboujaoudé</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Yaddaden</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Morin</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Bier</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Is research on 'smart living environments' based on unobtrusive technologies for older adults going in circles? Evidence from an umbrella review</article-title>
          <source>Ageing Res Rev</source>
          <year>2023</year>
          <volume>84</volume>
          <fpage>101830</fpage>
          <pub-id pub-id-type="doi">10.1016/j.arr.2022.101830</pub-id>
          <pub-id pub-id-type="medline">36565962</pub-id>
          <pub-id pub-id-type="pii">S1568-1637(22)00272-0</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>D'Haeseleer</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Coelho</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Vanden Abeele</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Vanrumste</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Recognition of bathroom activities in older adults using wearable sensors: a systematic review and recommendations</article-title>
          <source>Sensors (Basel)</source>
          <year>2021</year>
          <volume>21</volume>
          <issue>6</issue>
          <fpage>2176</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s21062176"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s21062176</pub-id>
          <pub-id pub-id-type="medline">33804626</pub-id>
          <pub-id pub-id-type="pii">s21062176</pub-id>
          <pub-id pub-id-type="pmcid">PMC8003704</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tricco</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Lillie</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Zarin</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>O'Brien</surname>
              <given-names>KK</given-names>
            </name>
            <name name-style="western">
              <surname>Colquhoun</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Levac</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Moher</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Peters</surname>
              <given-names>MD</given-names>
            </name>
            <name name-style="western">
              <surname>Horsley</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Weeks</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Hempel</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Akl</surname>
              <given-names>EA</given-names>
            </name>
            <name name-style="western">
              <surname>Chang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>McGowan</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Stewart</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Hartling</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Aldcroft</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Wilson</surname>
              <given-names>MG</given-names>
            </name>
            <name name-style="western">
              <surname>Garritty</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Lewin</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Godfrey</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Macdonald</surname>
              <given-names>MT</given-names>
            </name>
            <name name-style="western">
              <surname>Langlois</surname>
              <given-names>EV</given-names>
            </name>
            <name name-style="western">
              <surname>Soares-Weiser</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Moriarty</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Clifford</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Tunçalp</surname>
              <given-names>Ö</given-names>
            </name>
            <name name-style="western">
              <surname>Straus</surname>
              <given-names>SE</given-names>
            </name>
          </person-group>
          <article-title>PRISMA extension for scoping reviews (PRISMA-ScR): checklist and explanation</article-title>
          <source>Ann Intern Med</source>
          <year>2018</year>
          <volume>169</volume>
          <issue>7</issue>
          <fpage>467</fpage>
          <lpage>473</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.acpjournals.org/doi/abs/10.7326/M18-0850?url_ver=Z39.88-2003&#38;rfr_id=ori:rid:crossref.org&#38;rfr_dat=cr_pub%3dpubmed"/>
          </comment>
          <pub-id pub-id-type="doi">10.7326/M18-0850</pub-id>
          <pub-id pub-id-type="medline">30178033</pub-id>
          <pub-id pub-id-type="pii">2700389</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ray</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Recognition of basic ADLs using wearable sensors</article-title>
          <source>OSF</source>
          <year>2024</year>
          <month>07</month>
          <day>23</day>
          <access-date>2024-07-23</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://osf.io/db6qj">https://osf.io/db6qj</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Abreu</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Barandas</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Leonardo</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Gamboa</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Detailed human activity recognition based on multiple HMM</article-title>
          <year>2019</year>
          <conf-name>Proceedings of the 12th International Joint Conference on Biomedical Engineering Systems and Technologies - Volume 4: BIOSTEC</conf-name>
          <conf-date>February 22-24, 2019</conf-date>
          <conf-loc>Prague, Czech Republic</conf-loc>
          <fpage>171</fpage>
          <lpage>178</lpage>
          <pub-id pub-id-type="doi">10.5220/0007386901710178</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Liang</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Thomaz</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Audio-Based Activities of Daily Living (ADL) recognition with large-scale acoustic embeddings from online videos</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2019</year>
          <volume>3</volume>
          <issue>1</issue>
          <fpage>1</fpage>
          <lpage>18</lpage>
          <pub-id pub-id-type="doi">10.1145/3314404</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chavarriaga</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Sagha</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Calatroni</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Digumarti</surname>
              <given-names>ST</given-names>
            </name>
            <name name-style="western">
              <surname>Tröster</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Millán</surname>
              <given-names>JDR</given-names>
            </name>
            <name name-style="western">
              <surname>Roggen</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>The opportunity challenge: a benchmark database for on-body sensor-based activity recognition</article-title>
          <source>Pattern Recognition Letters</source>
          <year>2013</year>
          <volume>34</volume>
          <issue>15</issue>
          <fpage>2033</fpage>
          <lpage>2042</lpage>
          <pub-id pub-id-type="doi">10.1016/j.patrec.2012.12.014</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bedri</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Liang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Boovaraghavan</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Kaufman</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Goel</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>FitNibble: A field study to evaluate the utility and usability of automatic diet monitoring in food journaling using an eyeglasses-based wearable</article-title>
          <year>2022</year>
          <conf-name>IUI '22: Proceedings of the 27th International Conference on Intelligent User Interfaces</conf-name>
          <conf-date>March 22, 2022</conf-date>
          <conf-loc>Helsinki, Finland</conf-loc>
          <fpage>79</fpage>
          <lpage>92</lpage>
          <pub-id pub-id-type="doi">10.1145/3490099.3511154</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Akther</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Saleheen</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Samiei</surname>
              <given-names>SA</given-names>
            </name>
            <name name-style="western">
              <surname>Shetty</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Ertin</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Kumar</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>mORAL: An mHealth model for inferring oral hygiene behaviors in-the-wild using wrist-worn inertial sensors</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2019</year>
          <volume>3</volume>
          <issue>1</issue>
          <fpage>1</fpage>
          <lpage>25</lpage>
          <pub-id pub-id-type="doi">10.1145/3314388</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Feng</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Xu</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Brush like a dentist: Accurate monitoring of toothbrushing via wrist-worn gesture sensing</article-title>
          <year>2019</year>
          <conf-name>IEEE INFOCOM 2019 - IEEE Conference on Computer Communications</conf-name>
          <conf-date>April 29 to May 2, 2019</conf-date>
          <conf-loc>Paris, France</conf-loc>
          <pub-id pub-id-type="doi">10.1109/INFOCOM.2019.8737513</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref50">
        <label>50</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hussain</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Waterworth</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Aldeer</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Sheng</surname>
              <given-names>Q</given-names>
            </name>
          </person-group>
          <article-title>Toothbrushing data and analysis of its potential use in human activity recognition applications: dataset</article-title>
          <year>2020</year>
          <conf-name>DATA '20: Proceedings of the Third Workshop on Data: Acquisition To Analysis</conf-name>
          <conf-date>November 16-20, 2020</conf-date>
          <conf-loc>Virtual Event, Japan</conf-loc>
          <fpage>31</fpage>
          <lpage>34</lpage>
          <pub-id pub-id-type="doi">10.1145/3419016.3431489</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref51">
        <label>51</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mondol</surname>
              <given-names>MAS</given-names>
            </name>
            <name name-style="western">
              <surname>Stankovic</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>HAWAD: Hand washing detection using wrist wearable inertial sensors</article-title>
          <year>2020</year>
          <conf-name>16th International Conference on Distributed Computing in Sensor Systems (DCOSS)</conf-name>
          <conf-date>May 25-27, 2020</conf-date>
          <conf-loc>Marina del Rey, CA, USA</conf-loc>
          <pub-id pub-id-type="doi">10.1109/DCOSS49796.2020.00016</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref52">
        <label>52</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Samyoun</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Shubha</surname>
              <given-names>SS</given-names>
            </name>
            <name name-style="western">
              <surname>Sayeed Mondol</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Stankovic</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>iWash: A smartwatch handwashing quality assessment and reminder system with real-time feedback in the context of infectious disease</article-title>
          <source>Smart Health (Amst)</source>
          <year>2021</year>
          <volume>19</volume>
          <fpage>100171</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/33521225"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.smhl.2020.100171</pub-id>
          <pub-id pub-id-type="medline">33521225</pub-id>
          <pub-id pub-id-type="pii">S2352-6483(20)30063-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC7833562</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref53">
        <label>53</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Santos-Gago</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Ramos-Merino</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Alvarez-Sabucedo</surname>
              <given-names>LM</given-names>
            </name>
          </person-group>
          <article-title>Identification of free and WHO-compliant handwashing moments using low cost wrist-worn wearables</article-title>
          <source>IEEE Access</source>
          <year>2021</year>
          <volume>9</volume>
          <fpage>133574</fpage>
          <lpage>133593</lpage>
          <pub-id pub-id-type="doi">10.1109/access.2021.3115434</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref54">
        <label>54</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Xia</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>Optimal sensor position: Exploring the interface between the user and sensor in activity recognition system</article-title>
          <year>2021</year>
          <conf-name>CHI EA '21: Extended Abstracts of the 2021 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>May 08, 2021</conf-date>
          <conf-loc>Yokohama, Japan</conf-loc>
          <fpage>1</fpage>
          <lpage>5</lpage>
          <pub-id pub-id-type="doi">10.1145/3411763.3451517</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref55">
        <label>55</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bhalla</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Goel</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Khurana</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>IMU2Doppler: Cross-Modal domain adaptation for doppler-based activity recognition using IMU data</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2021</year>
          <volume>5</volume>
          <issue>4</issue>
          <fpage>1</fpage>
          <lpage>20</lpage>
          <pub-id pub-id-type="doi">10.1145/3494994</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref56">
        <label>56</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cao</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Y</given-names>
            </name>
          </person-group>
          <article-title>Leveraging wearables for assisting the elderly with dementia in handwashing</article-title>
          <source>IEEE Trans. on Mobile Comput</source>
          <year>2022</year>
          <fpage>1</fpage>
          <lpage>16</lpage>
          <pub-id pub-id-type="doi">10.1109/tmc.2022.3193615</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref57">
        <label>57</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cook</surname>
              <given-names>DJ</given-names>
            </name>
            <name name-style="western">
              <surname>Strickland</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Schmitter-Edgecombe</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Detecting smartwatch-based behavior change in response to a multi-domain brain health intervention</article-title>
          <source>ACM Trans Comput Healthc</source>
          <year>2022</year>
          <volume>3</volume>
          <issue>3</issue>
          <fpage>1</fpage>
          <lpage>18</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/35815157"/>
          </comment>
          <pub-id pub-id-type="doi">10.1145/3508020</pub-id>
          <pub-id pub-id-type="medline">35815157</pub-id>
          <pub-id pub-id-type="pii">33</pub-id>
          <pub-id pub-id-type="pmcid">PMC9268550</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref58">
        <label>58</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Liang</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Adaimi</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Marculescu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Thomaz</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>AudioIMU: Enhancing inertial sensing-based activity recognition with acoustic models</article-title>
          <year>2022</year>
          <conf-name>ISWC '22: Proceedings of the 2022 ACM International Symposium on Wearable Computers</conf-name>
          <conf-date>December 27, 2022</conf-date>
          <conf-loc>Cambridge, United Kingdom</conf-loc>
          <fpage>44</fpage>
          <lpage>48</lpage>
          <pub-id pub-id-type="doi">10.1145/3544794.3558471</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref59">
        <label>59</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mollyn</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Ahuja</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Verma</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Harrison</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Goel</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>SAMoSA: Sensing activities with motion and subsampled audio</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2022</year>
          <volume>6</volume>
          <issue>3</issue>
          <fpage>1</fpage>
          <lpage>19</lpage>
          <pub-id pub-id-type="doi">10.1145/3550284</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref60">
        <label>60</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Woodward</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Kanjo</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Taylor</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Hunt</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>A multi-sensor deep learning approach for complex daily living activity recognition</article-title>
          <year>2022</year>
          <conf-name>DigiBiom '22: Proceedings of the 2022 Workshop on Emerging Devices for Digital Biomarkers</conf-name>
          <conf-date>June 27, 2022</conf-date>
          <conf-loc>Portland, Oregon, USA</conf-loc>
          <fpage>13</fpage>
          <lpage>17</lpage>
          <pub-id pub-id-type="doi">10.1145/3539494.3542753</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref61">
        <label>61</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Alevizaki</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Pham</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Trigoni</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Invited paper: Hierarchical activity recognition with smartwatch IMU</article-title>
          <year>2023</year>
          <conf-name>ICDCN '23: Proceedings of the 24th International Conference on Distributed Computing and Networking</conf-name>
          <conf-date>January 04, 2023</conf-date>
          <conf-loc>Kharagpur, India</conf-loc>
          <fpage>48</fpage>
          <lpage>57</lpage>
          <pub-id pub-id-type="doi">10.1145/3571306.3571390</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref62">
        <label>62</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Schleter</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Avdonina</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Adhikary</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Jaisinghani</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Sen</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Poster: An automated method to detect tooth brushing activity with smartwatch sensors</article-title>
          <year>2024</year>
          <conf-name>MOBISYS '24: Proceedings of the 22nd Annual International Conference on Mobile Systems, Applications and Services</conf-name>
          <conf-date>June 04, 2024</conf-date>
          <conf-loc>Minato-ku, Tokyo, Japan</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3643832.3661417</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref63">
        <label>63</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mahmud</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Parikh</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Liang</surname>
              <given-names>Q</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Ajit</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Gunda</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Agarwal</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Guimbretiere</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>ActSonic: recognizing everyday activities from inaudible acoustic wave around the body</article-title>
          <source>Proc. ACM Interact. Mob. Wearable Ubiquitous Technol</source>
          <year>2024</year>
          <volume>8</volume>
          <issue>4</issue>
          <fpage>1</fpage>
          <lpage>32</lpage>
          <pub-id pub-id-type="doi">10.1145/3699752</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref64">
        <label>64</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Narkhede</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Gowing</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Vandenberg</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Phan</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wong</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Chan</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Automated detection of in-home activities with ultra-wideband sensors</article-title>
          <source>Sensors (Basel)</source>
          <year>2024</year>
          <volume>24</volume>
          <issue>14</issue>
          <fpage>4706</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s24144706"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s24144706</pub-id>
          <pub-id pub-id-type="medline">39066103</pub-id>
          <pub-id pub-id-type="pii">s24144706</pub-id>
          <pub-id pub-id-type="pmcid">PMC11281174</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref65">
        <label>65</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Masum</surname>
              <given-names>AKM</given-names>
            </name>
            <name name-style="western">
              <surname>Jannat</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bahadur</surname>
              <given-names>EH</given-names>
            </name>
            <name name-style="western">
              <surname>Alam</surname>
              <given-names>MGR</given-names>
            </name>
            <name name-style="western">
              <surname>Khan</surname>
              <given-names>SI</given-names>
            </name>
            <name name-style="western">
              <surname>Alam</surname>
              <given-names>MR</given-names>
            </name>
          </person-group>
          <article-title>Human activity recognition using smartphone sensors: a dense neural network approach</article-title>
          <year>2019</year>
          <conf-name>1st International Conference on Advances in Science, Engineering and Robotics Technology (ICASERT)</conf-name>
          <conf-date>May 05, 2019</conf-date>
          <conf-loc>Dhaka, Bangladesh</conf-loc>
          <pub-id pub-id-type="doi">10.1109/icasert.2019.8934657</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref66">
        <label>66</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tang</surname>
              <given-names>CI</given-names>
            </name>
            <name name-style="western">
              <surname>Perez-Pozuelo</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Spathis</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Brage</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wareham</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Mascolo</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>SelfHAR: Improving human activity recognition through self-training with unlabeled data</article-title>
          <year>2021</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>March 30, 2021</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>30</lpage>
          <pub-id pub-id-type="doi">10.1145/3448112</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref67">
        <label>67</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Augustinov</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Nisar</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Tabatabaei</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Grzegorzek</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Sohrabi</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Fudickar</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Transformer-based recognition of activities of daily living from wearable sensor data</article-title>
          <year>2023</year>
          <conf-name>iWOAR '22: Proceedings of the 7th International Workshop on Sensor-based Activity Recognition and Artificial Intelligence</conf-name>
          <conf-date>January 5, 2023</conf-date>
          <conf-loc>Rostock, Germany</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3558884.3558895</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref68">
        <label>68</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Xi</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Wei</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Optimization-free test-time adaptation for cross-person activity recognition</article-title>
          <year>2024</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>January 12, 2024</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>27</lpage>
          <pub-id pub-id-type="doi">10.1145/3631450</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref69">
        <label>69</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gomes</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Mendes-Moreira</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Sousa</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Silva</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Eating and drinking recognition in free-living conditions for triggering smart reminders</article-title>
          <source>Sensors (Basel)</source>
          <year>2019</year>
          <volume>19</volume>
          <issue>12</issue>
          <fpage>2803</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s19122803"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s19122803</pub-id>
          <pub-id pub-id-type="medline">31234499</pub-id>
          <pub-id pub-id-type="pii">s19122803</pub-id>
          <pub-id pub-id-type="pmcid">PMC6631238</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref70">
        <label>70</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bedri</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Khurana</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Bhuwalka</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Goel</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>FitByte: Automatic diet monitoring in unconstrained situations using multimodal sensing on eyeglasses</article-title>
          <year>2020</year>
          <conf-name>CHI '20: Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 23, 2020</conf-date>
          <conf-loc>Honolulu, HI, USA</conf-loc>
          <fpage>1</fpage>
          <lpage>12</lpage>
          <pub-id pub-id-type="doi">10.1145/3313831.3376869</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref71">
        <label>71</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Zhao</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Nguyen</surname>
              <given-names>DT</given-names>
            </name>
            <name name-style="western">
              <surname>Xu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Sen</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hester</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Alshurafa</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>NeckSense: a multi-sensor necklace for detecting eating activities in free-living conditions</article-title>
          <year>2020</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>June 15, 2020</conf-date>
          <conf-loc>United States</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3397313</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref72">
        <label>72</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Akbari</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Grimsley</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Jafari</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Data-driven context detection leveraging passively sensed nearables for recognizing complex activities of daily living</article-title>
          <source>ACM Trans. Comput. Healthcare</source>
          <year>2021</year>
          <volume>2</volume>
          <issue>2</issue>
          <fpage>1</fpage>
          <lpage>22</lpage>
          <pub-id pub-id-type="doi">10.1145/3428664</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref73">
        <label>73</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kyritsis</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Diou</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Delopoulos</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>A data driven end-to-end approach for in-the-wild monitoring of eating behavior using smartwatches</article-title>
          <source>IEEE J. Biomed. Health Inform</source>
          <year>2021</year>
          <volume>25</volume>
          <issue>1</issue>
          <fpage>22</fpage>
          <lpage>34</lpage>
          <pub-id pub-id-type="doi">10.1109/jbhi.2020.2984907</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref74">
        <label>74</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lago</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Matsuki</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Adachi</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Inoue</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Using additional training sensors to improve single-sensor complex activity recognition</article-title>
          <year>2021</year>
          <conf-name>ISWC '21: Proceedings of the 2021 ACM International Symposium on Wearable Computers</conf-name>
          <conf-date>September 21, 2021</conf-date>
          <conf-loc>Virtual USA</conf-loc>
          <fpage>18</fpage>
          <lpage>22</lpage>
          <pub-id pub-id-type="doi">10.1145/3460421.3480421</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref75">
        <label>75</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Morshed</surname>
              <given-names>MB</given-names>
            </name>
            <name name-style="western">
              <surname>Haresamudram</surname>
              <given-names>HK</given-names>
            </name>
            <name name-style="western">
              <surname>Bandaru</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Abowd</surname>
              <given-names>GD</given-names>
            </name>
            <name name-style="western">
              <surname>Ploetz</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>A personalized approach for developing a snacking detection system using earbuds in a semi-naturalistic setting</article-title>
          <year>2022</year>
          <conf-name>ISWC '22: Proceedings of the 2022 ACM International Symposium on Wearable Computers</conf-name>
          <conf-date>December 27, 2022</conf-date>
          <conf-loc>Cambridge, United Kingdom</conf-loc>
          <fpage>11</fpage>
          <lpage>16</lpage>
          <pub-id pub-id-type="doi">10.1145/3544794.3558469</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref76">
        <label>76</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Saphala</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Amft</surname>
              <given-names>O</given-names>
            </name>
          </person-group>
          <article-title>Proximity-based eating event detection in smart eyeglasses with expert and data models</article-title>
          <year>2022</year>
          <conf-name>ISWC '22: Proceedings of the 2022 ACM International Symposium on Wearable Computers</conf-name>
          <conf-date>September 11-15, 2022</conf-date>
          <conf-loc>Cambridge, United Kingdom</conf-loc>
          <fpage>59</fpage>
          <lpage>63</lpage>
          <pub-id pub-id-type="doi">10.1145/3544794.3558476</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref77">
        <label>77</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Staab</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bröning</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Luderschmidt</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Martin</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>Performance comparison of motion-related sensor technology and acoustic sensor technology in the field of human health monitoring</article-title>
          <year>2022</year>
          <conf-name>GoodIT '22: Proceedings of the 2022 ACM Conference on Information Technology for Social Good</conf-name>
          <conf-date>September 7-9, 2022</conf-date>
          <conf-loc>Limassol, Cyprus</conf-loc>
          <fpage>198</fpage>
          <lpage>204</lpage>
          <pub-id pub-id-type="doi">10.1145/3524458.3547220</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref78">
        <label>78</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Allman-Farinelli</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Taylor</surname>
              <given-names>JC</given-names>
            </name>
            <name name-style="western">
              <surname>Gemming</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Hekler</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Rangan</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Enhancing nutrition care through real-time, sensor-based capture of eating occasions: a scoping review</article-title>
          <source>Front Nutr</source>
          <year>2022</year>
          <volume>9</volume>
          <fpage>852984</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/35586732"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fnut.2022.852984</pub-id>
          <pub-id pub-id-type="medline">35586732</pub-id>
          <pub-id pub-id-type="pmcid">PMC9108538</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref79">
        <label>79</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Gade</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Cao</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Yan</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>EatingTrak: Detecting fine-grained eating moments in the wild using a wrist-mounted IMU</article-title>
          <year>2022</year>
          <conf-name>Proceedings of the ACM on Human-Computer Interaction</conf-name>
          <conf-date>September 20, 2022</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>22</lpage>
          <pub-id pub-id-type="doi">10.1145/3546749</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref80">
        <label>80</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Assi</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Meegahapola</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Droz</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Kun</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Götzen</surname>
              <given-names>AD</given-names>
            </name>
            <name name-style="western">
              <surname>Bidoglia</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Stares</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Gaskell</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Chagnaa</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ganbold</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Complex daily activities, country-level diversity, and smartphone sensing: a study in Denmark, Italy, Mongolia, Paraguay, and UK</article-title>
          <year>2023</year>
          <conf-name>CHI '23: Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 19, 2023</conf-date>
          <conf-loc>Hamburg, Germany</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3544548.3581190</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref81">
        <label>81</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hiraguchi</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Perone</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Toet</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Camps</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Brouwer</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Technology to automatically record eating behavior in real life: a systematic review</article-title>
          <source>Sensors (Basel)</source>
          <year>2023</year>
          <volume>23</volume>
          <issue>18</issue>
          <fpage>7757</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s23187757"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s23187757</pub-id>
          <pub-id pub-id-type="medline">37765812</pub-id>
          <pub-id pub-id-type="pii">s23187757</pub-id>
          <pub-id pub-id-type="pmcid">PMC10534458</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref82">
        <label>82</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pedram</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Fernandes</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Romano</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Wei</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Sen</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hester</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Alshurafa</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Experience: barriers and opportunities of wearables for eating research</article-title>
          <year>2023</year>
          <conf-name>CHI EA '23: Extended Abstracts of the 2023 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 19, 2023</conf-date>
          <conf-loc>Hamburg, Germany</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3544549.3573841</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref83">
        <label>83</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kianpisheh</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Mariakakis</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Truong</surname>
              <given-names>KN</given-names>
            </name>
          </person-group>
          <article-title>exHAR: An interface for helping non-experts develop and debug knowledge-based human activity recognition systems</article-title>
          <year>2024</year>
          <conf-name>Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies</conf-name>
          <conf-date>March 06, 2024</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>30</lpage>
          <pub-id pub-id-type="doi">10.1145/3643500</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref84">
        <label>84</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gómez-Carmona</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Casado-Mansilla</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>López-de-Ipiña</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>García-Zubia</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Simplicity is best: Addressing the computational cost of machine learning classifiers in constrained edge devices</article-title>
          <year>2019</year>
          <conf-name>IoT '19: Proceedings of the 9th International Conference on the Internet of Things</conf-name>
          <conf-date>October 22, 2019</conf-date>
          <conf-loc>United States</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3365871.3365889</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref85">
        <label>85</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wellnitz</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Wolff</surname>
              <given-names>JP</given-names>
            </name>
            <name name-style="western">
              <surname>Haubelt</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Kirste</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Fluid intake recognition using inertial sensors</article-title>
          <year>2020</year>
          <conf-name>Proceedings of the 6th International Workshop on Sensor-Based Activity Recognition and Interaction</conf-name>
          <conf-date>January 06, 2020</conf-date>
          <conf-loc>United States</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3361684.3361688</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref86">
        <label>86</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Kumar</surname>
              <given-names>TS</given-names>
            </name>
            <name name-style="western">
              <surname>De Raedt</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Camps</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Hallez</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Vanrumste</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Drinking gesture detection using wrist-worn IMU sensors with multi-stage temporal convolutional network in free-living environments</article-title>
          <year>2022</year>
          <conf-name>44th Annual International Conference of the IEEE Engineering in Medicine &#38; Biology Society (EMBC)</conf-name>
          <conf-date>July 15, 2022</conf-date>
          <conf-loc>Glasgow, Scotland, United Kingdom</conf-loc>
          <pub-id pub-id-type="doi">10.1109/embc48229.2022.9871817</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref87">
        <label>87</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hsieh</surname>
              <given-names>CY</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>HY</given-names>
            </name>
            <name name-style="western">
              <surname>Chan</surname>
              <given-names>CT</given-names>
            </name>
            <name name-style="western">
              <surname>Chiu</surname>
              <given-names>LT</given-names>
            </name>
          </person-group>
          <article-title>An analysis of fluid intake assessment approaches for fluid intake monitoring system</article-title>
          <source>Biosensors (Basel)</source>
          <year>2023</year>
          <volume>14</volume>
          <issue>1</issue>
          <fpage>14</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=bios14010014"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/bios14010014</pub-id>
          <pub-id pub-id-type="medline">38248391</pub-id>
          <pub-id pub-id-type="pii">bios14010014</pub-id>
          <pub-id pub-id-type="pmcid">PMC10813732</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref88">
        <label>88</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pärkkä</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ermes</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Korpipää</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Mäntyjärvi</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Peltola</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Korhonen</surname>
              <given-names>I</given-names>
            </name>
          </person-group>
          <article-title>Activity classification using realistic data from wearable sensors</article-title>
          <source>IEEE Trans Inf Technol Biomed</source>
          <year>2006</year>
          <volume>10</volume>
          <issue>1</issue>
          <fpage>119</fpage>
          <lpage>128</lpage>
          <pub-id pub-id-type="doi">10.1109/titb.2005.856863</pub-id>
          <pub-id pub-id-type="medline">16445257</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref89">
        <label>89</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhu</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Sheng</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Human daily activity recognition in robot-assisted living using multi-sensor fusion</article-title>
          <year>2009</year>
          <conf-name>In IEEE International Conference on Robotics and Automation. IEEE</conf-name>
          <conf-date>March 11-13, 2009</conf-date>
          <conf-loc>La Jolla, CA, USA</conf-loc>
          <pub-id pub-id-type="doi">10.1109/robot.2009.5152756</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref90">
        <label>90</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ronao</surname>
              <given-names>CA</given-names>
            </name>
            <name name-style="western">
              <surname>Cho</surname>
              <given-names>SB</given-names>
            </name>
          </person-group>
          <article-title>Human activity recognition with smartphone sensors using deep learning neural networks</article-title>
          <source>Expert Systems with Applications</source>
          <year>2016</year>
          <volume>59</volume>
          <fpage>235</fpage>
          <lpage>244</lpage>
          <pub-id pub-id-type="doi">10.1016/j.eswa.2016.04.032</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref91">
        <label>91</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Weiss</surname>
              <given-names>GM</given-names>
            </name>
            <name name-style="western">
              <surname>Timko</surname>
              <given-names>JL</given-names>
            </name>
            <name name-style="western">
              <surname>Gallagher</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Yoneda</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Schreiber</surname>
              <given-names>AJ</given-names>
            </name>
          </person-group>
          <article-title>Smartwatch-based activity recognition: a machine learning approach</article-title>
          <year>2016</year>
          <conf-name>IEEE-EMBS International Conference on Biomedical and Health Informatics (BHI)</conf-name>
          <conf-date>February 27, 2016</conf-date>
          <conf-loc>Las Vegas, NV, United States</conf-loc>
          <fpage>426</fpage>
          <lpage>429</lpage>
          <pub-id pub-id-type="doi">10.1109/bhi.2016.7455925</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref92">
        <label>92</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Qin</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Meng</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Qin</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Choo</surname>
              <given-names>KKR</given-names>
            </name>
          </person-group>
          <article-title>Imaging and fusing time series for wearable sensor-based human activity recognition</article-title>
          <source>Information Fusion</source>
          <year>2020</year>
          <volume>53</volume>
          <fpage>80</fpage>
          <lpage>87</lpage>
          <pub-id pub-id-type="doi">10.1016/j.inffus.2019.06.014</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref93">
        <label>93</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Song</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Deep ensemble learning for human activity recognition using wearable sensors via filter activation</article-title>
          <source>ACM Trans. Embed. Comput. Syst</source>
          <year>2022</year>
          <volume>22</volume>
          <issue>1</issue>
          <fpage>1</fpage>
          <lpage>23</lpage>
          <pub-id pub-id-type="doi">10.1145/3551486</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref94">
        <label>94</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ghasemzadeh</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Amini</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Saeedi</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Sarrafzadeh</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Power-aware computing in wearable sensor networks: an optimal feature selection</article-title>
          <source>IEEE Trans. on Mobile Comput</source>
          <year>2015</year>
          <volume>14</volume>
          <issue>4</issue>
          <fpage>800</fpage>
          <lpage>812</lpage>
          <pub-id pub-id-type="doi">10.1109/tmc.2014.2331969</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref95">
        <label>95</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>YH</given-names>
            </name>
            <name name-style="western">
              <surname>Chou</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Danilovich</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Lazar</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Conroy</surname>
              <given-names>DE</given-names>
            </name>
            <name name-style="western">
              <surname>Kacorri</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Choe</surname>
              <given-names>EK</given-names>
            </name>
          </person-group>
          <article-title>MyMove: facilitating older adults to collect in-situ activity labels on a smartwatch with speech</article-title>
          <year>2022</year>
          <conf-name>Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 29, 2022</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>21</lpage>
          <pub-id pub-id-type="doi">10.1145/3491102.3517457</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref96">
        <label>96</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Caldeira</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Nurain</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Heintzman</surname>
              <given-names>AA</given-names>
            </name>
            <name name-style="western">
              <surname>Molchan</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Caine</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Demiris</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Siek</surname>
              <given-names>KA</given-names>
            </name>
            <name name-style="western">
              <surname>Reeder</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Connelly</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>"How do I compare to the other people?": Older Adults' perspectives on personal smart home data for self-management</article-title>
          <year>2023</year>
          <conf-name>Proc ACM Hum-Comput Interact. 2023;7(CSCW2)</conf-name>
          <conf-date>October 04, 2023</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>32</lpage>
          <pub-id pub-id-type="doi">10.1145/3610029</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref97">
        <label>97</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cherian</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ray</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Mernar</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Taele</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Mach</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Koh</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Ye</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Hammond</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>A step toward better care: understanding what caregivers and residents in assisted living facilities value in health monitoring systems</article-title>
          <year>2024</year>
          <conf-name>Proc ACM Hum-Comput Interact. 2024;8(CSCW1)</conf-name>
          <conf-date>Apr 26, 2024</conf-date>
          <conf-loc>United States</conf-loc>
          <fpage>1</fpage>
          <lpage>29</lpage>
          <pub-id pub-id-type="doi">10.1145/3637290</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref98">
        <label>98</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Nasreddine</surname>
              <given-names>ZS</given-names>
            </name>
            <name name-style="western">
              <surname>Phillips</surname>
              <given-names>NA</given-names>
            </name>
            <name name-style="western">
              <surname>Bédirian</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Charbonneau</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Whitehead</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Collin</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Cummings</surname>
              <given-names>JL</given-names>
            </name>
            <name name-style="western">
              <surname>Chertkow</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>The Montreal Cognitive Assessment, MoCA: a brief screening tool for mild cognitive impairment</article-title>
          <source>J Am Geriatr Soc</source>
          <year>2005</year>
          <volume>53</volume>
          <issue>4</issue>
          <fpage>695</fpage>
          <lpage>699</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1111/j.1532-5415.2005.53221.x"/>
          </comment>
          <pub-id pub-id-type="doi">10.1111/j.1532-5415.2005.53221.x</pub-id>
          <pub-id pub-id-type="medline">15817019</pub-id>
          <pub-id pub-id-type="pii">JGS53221</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
