<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="review-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v26i1e54557</article-id>
      <article-id pub-id-type="pmid">39608003</article-id>
      <article-id pub-id-type="doi">10.2196/54557</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Review</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Review</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Artificial Intelligence Applications to Measure Food and Nutrient Intakes: Scoping Review</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Mavragani</surname>
            <given-names>Amaryllis</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Sure</surname>
            <given-names>Tharun Anand Reddy</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Kommireddy</surname>
            <given-names>Shreeven</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Elbattah</surname>
            <given-names>Mahmoud</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Davies</surname>
            <given-names>Tazman</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Baranowski</surname>
            <given-names>Tom</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" corresp="yes" equal-contrib="yes">
          <name name-style="western">
            <surname>Zheng</surname>
            <given-names>Jiakun</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>School of Economics and Management</institution>
            <institution>Shanghai University of Sport</institution>
            <addr-line>399 Changhai Road, Yangpu District</addr-line>
            <addr-line>Shanghai, 200438</addr-line>
            <country>China</country>
            <phone>86 13817507993</phone>
            <email>zhengjiakun07@163.com</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-3054-1823</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>Junjie</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-2270-7245</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Shen</surname>
            <given-names>Jing</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-9751-5454</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>An</surname>
            <given-names>Ruopeng</given-names>
          </name>
          <degrees>MPP, PhD</degrees>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-9632-0209</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>School of Economics and Management</institution>
        <institution>Shanghai University of Sport</institution>
        <addr-line>Shanghai</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>School of Kinesiology and Health Promotion</institution>
        <institution>Dalian University of Technology</institution>
        <addr-line>Dalian</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Physical Education</institution>
        <institution>China University of Geosciences (Beijing)</institution>
        <addr-line>Beijing</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>Silver School of Social Work</institution>
        <institution>New York University</institution>
        <addr-line>New York, NY</addr-line>
        <country>United States</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Jiakun Zheng <email>zhengjiakun07@163.com</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2024</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>28</day>
        <month>11</month>
        <year>2024</year>
      </pub-date>
      <volume>26</volume>
      <elocation-id>e54557</elocation-id>
      <history>
        <date date-type="received">
          <day>14</day>
          <month>11</month>
          <year>2023</year>
        </date>
        <date date-type="rev-request">
          <day>30</day>
          <month>5</month>
          <year>2024</year>
        </date>
        <date date-type="rev-recd">
          <day>18</day>
          <month>7</month>
          <year>2024</year>
        </date>
        <date date-type="accepted">
          <day>8</day>
          <month>10</month>
          <year>2024</year>
        </date>
      </history>
      <copyright-statement>©Jiakun Zheng, Junjie Wang, Jing Shen, Ruopeng An. Originally published in the Journal of Medical Internet Research (https://www.jmir.org), 28.11.2024.</copyright-statement>
      <copyright-year>2024</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research (ISSN 1438-8871), is properly cited. The complete bibliographic information, a link to the original publication on https://www.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://www.jmir.org/2024/1/e54557" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Accurate measurement of food and nutrient intake is crucial for nutrition research, dietary surveillance, and disease management, but traditional methods such as 24-hour dietary recalls, food diaries, and food frequency questionnaires are often prone to recall error and social desirability bias, limiting their reliability. With the advancement of artificial intelligence (AI), there is potential to overcome these limitations through automated, objective, and scalable dietary assessment techniques. However, the effectiveness and challenges of AI applications in this domain remain inadequately explored.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>This study aimed to conduct a scoping review to synthesize existing literature on the efficacy, accuracy, and challenges of using AI tools in assessing food and nutrient intakes, offering insights into their current advantages and areas of improvement.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>This review followed the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) guidelines. A comprehensive literature search was conducted in 4 databases—PubMed, Web of Science, Cochrane Library, and EBSCO—covering publications from the databases’ inception to June 30, 2023. Studies were included if they used modern AI approaches to assess food and nutrient intakes in human subjects.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>The 25 included studies, published between 2010 and 2023, involved sample sizes ranging from 10 to 38,415 participants. These studies used a variety of input data types, including food images (n=10), sound and jaw motion data from wearable devices (n=9), and text data (n=4), with 2 studies combining multiple input types. AI models applied included deep learning (eg, convolutional neural networks), machine learning (eg, support vector machines), and hybrid approaches. Applications were categorized into dietary intake assessment, food detection, nutrient estimation, and food intake prediction. Food detection accuracies ranged from 74% to 99.85%, and nutrient estimation errors varied between 10% and 15%. For instance, the RGB-D (Red, Green, Blue-Depth) fusion network achieved a mean absolute error of 15% in calorie estimation, and a sound-based classification model reached up to 94% accuracy in detecting food intake based on jaw motion and chewing patterns. In addition, AI-based systems provided real-time monitoring capabilities, improving the precision of dietary assessments and demonstrating the potential to reduce recall bias typically associated with traditional self-report methods.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>While AI demonstrated significant advantages in improving accuracy, reducing labor, and enabling real-time monitoring, challenges remain in adapting to diverse food types, ensuring algorithmic fairness, and addressing data privacy concerns. The findings suggest that AI has transformative potential for dietary assessment at both individual and population levels, supporting precision nutrition and chronic disease management. Future research should focus on enhancing the robustness of AI models across diverse dietary contexts and integrating biological sensors for a holistic dietary assessment approach.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>food</kwd>
        <kwd>nutrient</kwd>
        <kwd>diet</kwd>
        <kwd>artificial intelligence</kwd>
        <kwd>machine learning</kwd>
        <kwd>deep learning</kwd>
        <kwd>neural networks</kwd>
        <kwd>computer vision</kwd>
        <kwd>natural language processing</kwd>
        <kwd>measurement</kwd>
        <kwd>AI</kwd>
        <kwd>food intake</kwd>
        <kwd>systematic literature</kwd>
        <kwd>dietary assessments</kwd>
        <kwd>AI-based</kwd>
        <kwd>disease management</kwd>
        <kwd>mobile phone</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Measuring food and nutrient intake is foundational in nutrition research, dietary surveillance, and clinical practice [<xref ref-type="bibr" rid="ref1">1</xref>]. Traditional methods, such as 24-hour dietary recalls, food diaries, and food frequency questionnaires, have been the cornerstones of such endeavors [<xref ref-type="bibr" rid="ref2">2</xref>]. However, these self-reported tools frequently encounter issues associated with recall error, where individuals inadvertently omit, underreport, or exaggerate certain food items or quantities [<xref ref-type="bibr" rid="ref3">3</xref>]. Furthermore, social desirability bias further complicates matters, with respondents potentially altering their reports to reflect what they perceive as more socially acceptable or healthier dietary habits [<xref ref-type="bibr" rid="ref4">4</xref>]. While clinical measures in controlled environments, such as laboratories, offer higher accuracy, they have drawbacks [<xref ref-type="bibr" rid="ref5">5</xref>]. These objective measures often entail labor-intensive processes, significant costs, and potential intrusiveness for participants [<xref ref-type="bibr" rid="ref6">6</xref>]. Such constraints render them less suitable for large-scale, population-level studies or individuals seeking to personally monitor their food and nutrient intake for disease management and other health-related objectives [<xref ref-type="bibr" rid="ref6">6</xref>]. In light of these challenges, there is an escalating interest in leveraging artificial intelligence (AI) to enhance the accuracy and feasibility of dietary intake assessment [<xref ref-type="bibr" rid="ref7">7</xref>].</p>
      <p>AI, a branch of computer science focusing on developing algorithms that simulate human cognitive functions, has shown transformative potential across diverse sectors [<xref ref-type="bibr" rid="ref8">8</xref>]. In health-related research, AI’s ability to process vast amounts of data at incredible speeds and its adeptness at pattern recognition has made substantial strides in medical imaging, predictive modeling of disease outbreaks, and personalized medicine [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>]. In the context of dietary assessment, AI offers several distinct advantages. First, it can potentially mitigate the biases inherent in self-reported methods by using image recognition to identify and quantify food items with minimal input from the user [<xref ref-type="bibr" rid="ref11">11</xref>]. Advanced machine learning algorithms can analyze photographs of meals and provide instant, objective assessments of portion sizes and nutrient content [<xref ref-type="bibr" rid="ref11">11</xref>,<xref ref-type="bibr" rid="ref12">12</xref>]. In addition to image-based methods, AI techniques also use sound, jaw motion from wearable devices, and text data for dietary assessment. These methods provide diverse approaches to capture dietary intake, enhancing the accuracy and comprehensiveness of assessments. Second, AI can offer continuous, real-time monitoring, bridging the temporal gap in methods like 24-hour recalls [<xref ref-type="bibr" rid="ref13">13</xref>]. Finally, while laboratory-based clinical measures are costly and labor-intensive, once developed, AI-driven tools can be scaled up relatively inexpensively, making them more feasible for large population studies and individual dietary tracking [<xref ref-type="bibr" rid="ref14">14</xref>]. Given these attributes, AI emerges as a promising candidate to revolutionize the landscape of food and nutrient intake measurement.</p>
      <p>While numerous reviews have covered objective measures of dietary intake, our review specifically focuses on the application of AI technologies in this field. This scoping review provides a comprehensive synthesis of recent advancements, highlights the unique challenges faced by AI methodologies, and identifies critical gaps that future research should address. Our work adds to the existing literature by providing a detailed analysis of AI’s role in improving the accuracy and efficiency of dietary assessment.</p>
      <p>To the best of our knowledge, a comprehensive scoping review that delves into the applications of AI for measuring food and nutrient intakes has not yet been conducted. This gap in the literature underlines the novelty and urgency of our investigation. The primary objective of this review is to explore and map out the current landscape of AI applications in dietary assessment, detailing methodologies, tools, and their associated findings.</p>
      <p>This endeavor holds transformative potential for several reasons. First, by consolidating and synthesizing the vast yet dispersed body of knowledge, researchers, clinicians, and policy makers can gain a cohesive understanding of the current state-of-the-art and its implications for the future. Second, the review will spotlight any existing limitations or gaps in the current AI methodologies, paving the way for targeted advancements in technology and research design. Finally, given the paramount importance of accurate dietary assessment in myriad health outcomes and policy decisions, our findings can directly inform best practices, promote technology adoption in clinical and research settings, and guide future funding and priorities in technological and nutritional research sectors.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Overview</title>
        <p>This scoping review followed the guidelines of the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews; see <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>) [<xref ref-type="bibr" rid="ref15">15</xref>].</p>
      </sec>
      <sec>
        <title>Study Selection Criteria</title>
        <p>Predefined inclusion and exclusion criteria were established and applied to all identified studies during the screening process. <xref ref-type="boxed-text" rid="box1">Textbox 1</xref> provides a detailed overview of the inclusion and exclusion criteria, outlining the study characteristics considered for eligibility in this review.</p>
        <boxed-text id="box1" position="float">
          <title>Inclusion and exclusion criteria for study selection.</title>
          <p>Inclusion criteria:</p>
          <list list-type="bullet">
            <list-item>
              <p>Study design: Experimental studies (eg, randomized controlled trials [RCTs], pre-post interventions) and observational studies (eg, cross-sectional, longitudinal).</p>
            </list-item>
            <list-item>
              <p>Analytic approach: Modern AI approaches, including machine learning (ML), deep learning (DL), and reinforcement learning (RL).</p>
            </list-item>
            <list-item>
              <p>Participants: Individuals of all ages.</p>
            </list-item>
            <list-item>
              <p>Data type: Input data, including food images, plate images, etc.</p>
            </list-item>
            <list-item>
              <p>Outcome: Measures on food and nutrient intakes.</p>
            </list-item>
            <list-item>
              <p>Article type: Original, empirical, peer-reviewed journal publications.</p>
            </list-item>
            <list-item>
              <p>Language: Articles written in English.</p>
            </list-item>
            <list-item>
              <p>Search time frame: From the inception of electronic bibliographic databases to June 30, 2023.</p>
            </list-item>
          </list>
          <p>Exclusion criteria:</p>
          <list list-type="bullet">
            <list-item>
              <p>Study design: Studies that do not involve human subjects or lack an observational or experimental design.</p>
            </list-item>
            <list-item>
              <p>Analytic approach: Studies using rule-based (“hard-coded”) approaches instead of example-based ML, DL, or RL.</p>
            </list-item>
            <list-item>
              <p>Participants: Non-human subjects.</p>
            </list-item>
            <list-item>
              <p>Data type: Studies not using dietary input data.</p>
            </list-item>
            <list-item>
              <p>Outcome: Studies without outcomes related to food and nutrient intakes.</p>
            </list-item>
            <list-item>
              <p>Article type: Letters, editorials, study or review protocols, case reports, or review articles.</p>
            </list-item>
            <list-item>
              <p>Language: Non–English-language articles.</p>
            </list-item>
            <list-item>
              <p>Search time frame: Studies published after June 30, 2023.</p>
            </list-item>
          </list>
        </boxed-text>
      </sec>
      <sec>
        <title>Search Strategy</title>
        <p>A comprehensive search was performed in 4 electronic bibliographic databases: PubMed, Web of Science, Cochrane Library, and EBSCO. The search strategy used a combination of controlled vocabulary (eg, MeSH terms in PubMed) and free-text keywords. The search terms were structured around two main concepts: (1) AI and (2) nutrition or dietary intake. The AI-related terms included: “artificial intelligence,” “machine learning,” “deep learning,” “neural networks,” “natural language processing,” “computer vision,” “algorithms,” “data mining,” “big data,” “predictive modeling,” and “automated pattern recognition.” The nutrition-related terms included: “nutrition,” “dietetics,” “nutritional sciences,” “diet,” “dietary behavior,” “beverage intake,” “food intake,” “nutrient intake,” and “healthy eating.” These keywords were combined using Boolean operators (AND, OR) to ensure a comprehensive search. The complete search strategy, including database-specific modifications and detailed search strings, is provided in <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref>. After the initial search, 2 coauthors independently screened the titles and abstracts for the articles found through the keyword search, obtained potentially relevant articles, and reviewed their full texts. The inter-rater agreement between these two authors was evaluated using Cohen κ (κ=0.85). Disagreements were resolved through discussion.</p>
      </sec>
      <sec>
        <title>Data Extraction and Synthesis</title>
        <p>The following methodological and outcome variables were collected from each study using a standardized data extraction form: authors, year of publication, country or region, study objective, sample size, sample characteristics, AI models used, tasks and applications, type of input data, outcome measures, and perceived usefulness of AI technologies. No meta-analysis was feasible, given the substantial heterogeneity of the models, outcome measures, and applications. Therefore, we synthesized the study findings narratively and categorized them into distinct themes.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Identification of Studies</title>
        <p><xref rid="figure1" ref-type="fig">Figure 1</xref> illustrates the PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) flow diagram, outlining the structured literature search and selection procedure. The initial database search identified 6132 articles. After removing duplicates, 5499 unique articles were retained for preliminary screening based on their titles and abstracts. From this collection, 5456 articles were evaluated as irrelevant and, consequently, excluded from the review. Applying the study selection criteria to the remaining 43 articles resulted in the further exclusion of 18 studies due to various reasons, including lack of AI technology adoption (n=7), absence of food and nutrient intake measurements (n=6), being a commentary rather than original empirical research (n=3), and a focus on smartphone-based apps (n=2). Ultimately, 25 studies met the relevance criteria and were included in the review [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref16">16</xref>-<xref ref-type="bibr" rid="ref38">38</xref>].</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) flow diagram illustrating the study selection process.</p>
          </caption>
          <graphic xlink:href="jmir_v26i1e54557_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Study Characteristics</title>
        <p><xref ref-type="table" rid="table1">Table 1</xref> reports the characteristics, type of input data, outcome measures, and main findings of the 25 studies incorporated in the review (more details in <xref ref-type="supplementary-material" rid="app3">Multimedia Appendix 3</xref>). The studies spanned a range of publication years, with the earliest appearing in 2010 [<xref ref-type="bibr" rid="ref16">16</xref>] and singular studies being published in 2013 [<xref ref-type="bibr" rid="ref17">17</xref>], 2015 [<xref ref-type="bibr" rid="ref18">18</xref>], and 2023 [<xref ref-type="bibr" rid="ref38">38</xref>]. Publications in the following years were more frequent, with 2 studies each in 2016 [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref20">20</xref>], 2018 [<xref ref-type="bibr" rid="ref21">21</xref>,<xref ref-type="bibr" rid="ref22">22</xref>], and 2020 [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref27">27</xref>], 3 in 2021 [<xref ref-type="bibr" rid="ref28">28</xref>-<xref ref-type="bibr" rid="ref30">30</xref>], 5 in 2019 [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref23">23</xref>-<xref ref-type="bibr" rid="ref26">26</xref>], and 7 in 2022 [<xref ref-type="bibr" rid="ref31">31</xref>-<xref ref-type="bibr" rid="ref37">37</xref>]. 
The geographical spread of the studies was diverse, with research conducted in several different countries: 14 in the United States [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref19">19</xref>-<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref24">24</xref>-<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref37">37</xref>], 4 in Switzerland [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref23">23</xref>,<xref ref-type="bibr" rid="ref29">29</xref>], 2 in France [<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref36">36</xref>], and 1 each in Canada [<xref ref-type="bibr" rid="ref33">33</xref>], China [<xref ref-type="bibr" rid="ref38">38</xref>], Denmark [<xref ref-type="bibr" rid="ref34">34</xref>], Philippines [<xref ref-type="bibr" rid="ref35">35</xref>], and Slovenia [<xref ref-type="bibr" rid="ref28">28</xref>].</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Geographic location, sample size, sample characteristics, artificial intelligence models, type of input data, task, outcome measures, and main findings in the studies included in the review.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="160"/>
            <col width="100"/>
            <col width="90"/>
            <col width="150"/>
            <col width="120"/>
            <col width="150"/>
            <col width="110"/>
            <col width="120"/>
            <thead>
              <tr valign="top">
                <td>Author, Year</td>
                <td>Country or<break/>Region</td>
                <td>Sample size</td>
                <td>Sample characteristics</td>
                <td>AI<sup>a</sup> models</td>
                <td>Type of input data</td>
                <td>Task</td>
                <td>Outcome measures</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Lopez-Meyer et al, 2010 [<xref ref-type="bibr" rid="ref16">16</xref>]</td>
                <td>United States</td>
                <td>18</td>
                <td>Healthy adults (BMI: 28.01, SD 6.35)</td>
                <td>SVM<sup>b</sup>, RBFk<sup>c</sup></td>
                <td>Sound, strain signal</td>
                <td>Classification</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Fontana et al, 2013 [<xref ref-type="bibr" rid="ref17">17</xref>]</td>
                <td>United States</td>
                <td>12</td>
                <td>Healthy adults (BMI: 24.39, SD 3.81)</td>
                <td>RF<sup>d</sup></td>
                <td>Jaw motion signal, hand gesture signal, body acceleration</td>
                <td>Classification</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Anthimopoulos et al, 2015 [<xref ref-type="bibr" rid="ref18">18</xref>]</td>
                <td>Switzerland</td>
                <td>144</td>
                <td>Images of dishes</td>
                <td>CV<sup>e</sup>, SVM</td>
                <td>Image</td>
                <td>Regression</td>
                <td>Carbohydrates counting</td>
              </tr>
              <tr valign="top">
                <td>Farooq and Sazonov, 2016 [<xref ref-type="bibr" rid="ref19">19</xref>]</td>
                <td>United States</td>
                <td>10</td>
                <td>Healthy adults (BMI: 27.87, SD 5.51)</td>
                <td>SVM, DT<sup>f</sup></td>
                <td>Jaw motion signal, body acceleration signal</td>
                <td>Classification</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Hezarjaribi et al, 2016 [<xref ref-type="bibr" rid="ref20">20</xref>]</td>
                <td>United States</td>
                <td>10</td>
                <td>—<sup>g</sup></td>
                <td>SRM<sup>h</sup>, NLP<sup>i</sup>, SMM<sup>j</sup></td>
                <td>Audio signal</td>
                <td>Regression</td>
                <td>Calorie intake</td>
              </tr>
              <tr valign="top">
                <td>Goldstein et al, 2018 [<xref ref-type="bibr" rid="ref21">21</xref>]</td>
                <td>United States</td>
                <td>12</td>
                <td>Adults with overweight/obesity (BMI: 33.60, SD 5.66)</td>
                <td>RF, DT, Logit.Boot, BN<sup>k</sup>, Bagging, Random subspace</td>
                <td>Text</td>
                <td>Regression</td>
                <td>Dietary lapses</td>
              </tr>
              <tr valign="top">
                <td>Hezarjaribi et al, 2018 [<xref ref-type="bibr" rid="ref22">22</xref>]</td>
                <td>United States</td>
                <td>30</td>
                <td>—</td>
                <td>NLP, LA<sup>l</sup></td>
                <td>Audio signal</td>
                <td>Regression</td>
                <td>Calorie intake</td>
              </tr>
              <tr valign="top">
                <td>Lu et al, 2019 [<xref ref-type="bibr" rid="ref23">23</xref>]</td>
                <td>Switzerland</td>
                <td>644</td>
                <td>Meal images (pixel: 640×480)</td>
                <td>MTNnet<sup>m</sup>, DTM<sup>n</sup>, RANSAC algorithm</td>
                <td>Image</td>
                <td>Regression</td>
                <td>Nutrient intake</td>
              </tr>
              <tr valign="top">
                <td>Fang et al, 2019 [<xref ref-type="bibr" rid="ref12">12</xref>]</td>
                <td>United States</td>
                <td>4190</td>
                <td>Food images (pixel: 224×224)</td>
                <td>GAN<sup>o</sup>, CNN<sup>p</sup></td>
                <td>Image</td>
                <td>Regression</td>
                <td>Food energy</td>
              </tr>
              <tr valign="top">
                <td>Jia et al, 2019 [<xref ref-type="bibr" rid="ref24">24</xref>]</td>
                <td>United States</td>
                <td>38,415</td>
                <td>Images (pixel: 640×480)</td>
                <td>CNN</td>
                <td>Image</td>
                <td>Regression</td>
                <td>Dietary assessment</td>
              </tr>
              <tr valign="top">
                <td>Chin et al, 2019 [<xref ref-type="bibr" rid="ref25">25</xref>]</td>
                <td>United States</td>
                <td>567</td>
                <td>Food descriptions</td>
                <td>LASSO<sup>q</sup>, Ridge, FFNN<sup>r</sup>, XGB<sup>s</sup> models</td>
                <td>Text</td>
                <td>Regression</td>
                <td>Amount of lactose</td>
              </tr>
              <tr valign="top">
                <td>Farooq et al, 2019 [<xref ref-type="bibr" rid="ref26">26</xref>]</td>
                <td>United States</td>
                <td>40</td>
                <td>Healthy adults (BMI: 26.1, SD 5.2)</td>
                <td>NNC<sup>t</sup></td>
                <td>Hand gesture, jaw motion, body acceleration</td>
                <td>Classification</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Heremans et al, 2020 [<xref ref-type="bibr" rid="ref27">27</xref>]</td>
                <td>United States</td>
                <td>126</td>
                <td>Adults with dyspepsia</td>
                <td>ANN<sup>u</sup></td>
                <td>Heart rate variability signal</td>
                <td>Classification</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Lu et al, 2021 [<xref ref-type="bibr" rid="ref14">14</xref>]</td>
                <td>Switzerland</td>
                <td>644</td>
                <td>Meal images (pixel: 640×480)</td>
                <td>MTCNet<sup>v</sup>, FSLBC<sup>w</sup>, 3D-SCA<sup>x</sup></td>
                <td>Image</td>
                <td>Regression</td>
                <td>Nutrient intake</td>
              </tr>
              <tr valign="top">
                <td>Mezgec and Koroušić Seljak, 2021 [<xref ref-type="bibr" rid="ref28">28</xref>]</td>
                <td>Slovenia</td>
                <td>520</td>
                <td>Food images (pixel: 512×512)</td>
                <td>DNN<sup>y</sup></td>
                <td>Image</td>
                <td>Classification</td>
                <td>Dietary assessment</td>
              </tr>
              <tr valign="top">
                <td>Papathanail et al, 2021 [<xref ref-type="bibr" rid="ref29">29</xref>]</td>
                <td>Switzerland</td>
                <td>866</td>
                <td>Meal images (pixel: 640×480)</td>
                <td>CNN, PSPNet<sup>z</sup>, DeepLabv3 network</td>
                <td>Image</td>
                <td>Regression</td>
                <td>Energy, nutrient intake</td>
              </tr>
              <tr valign="top">
                <td>Taylor et al, 2021 [<xref ref-type="bibr" rid="ref30">30</xref>]</td>
                <td>United States</td>
                <td>34</td>
                <td>Healthy adults (mean BMI: 24)</td>
                <td>CNN, SMM</td>
                <td>Text, voice data</td>
                <td>Regression</td>
                <td>Energy intake</td>
              </tr>
              <tr valign="top">
                <td>Ghosh and Sazonov, 2022 [<xref ref-type="bibr" rid="ref31">31</xref>]</td>
                <td>United States</td>
                <td>17</td>
                <td>Adolescents and adults</td>
                <td>Time-CNN, ResNet<sup>aa</sup>, FCN<sup>ab</sup>, IM<sup>ac</sup>, MLP<sup>ad</sup></td>
                <td>Accelerometer, optical sensor data</td>
                <td>Classification</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Van Wymelbeke-Delannoy et al, 2022 [<xref ref-type="bibr" rid="ref32">32</xref>]</td>
                <td>France</td>
                <td>22,544</td>
                <td>Dishes images</td>
                <td>DNN</td>
                <td>Food image</td>
                <td>Regression</td>
                <td>Food item</td>
              </tr>
              <tr valign="top">
                <td>Pfisterer et al, 2022 [<xref ref-type="bibr" rid="ref33">33</xref>]</td>
                <td>Canada</td>
                <td>689</td>
                <td>Plate images (pixel: 640×480)</td>
                <td>Deep-CNN</td>
                <td>Plate image</td>
                <td>Regression</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Pedersen et al, 2022 [<xref ref-type="bibr" rid="ref34">34</xref>]</td>
                <td>Denmark</td>
                <td>100</td>
                <td>Adults with normal weight</td>
                <td>RF</td>
                <td>Psychophysiological responses</td>
                <td>Regression</td>
                <td>Food intake</td>
              </tr>
              <tr valign="top">
                <td>Siy Van et al, 2022 [<xref ref-type="bibr" rid="ref35">35</xref>]</td>
                <td>Philippines</td>
                <td>618</td>
                <td>Children</td>
                <td>RF, SVM, LDA<sup>ae</sup>, LR<sup>af</sup></td>
                <td>Text</td>
                <td>Regression</td>
                <td>Under-nutrition</td>
              </tr>
              <tr valign="top">
                <td>Granal et al, 2022 [<xref ref-type="bibr" rid="ref36">36</xref>]</td>
                <td>France</td>
                <td>375</td>
                <td>Adults with chronic kidney disease</td>
                <td>BN, BTANN<sup>ag</sup></td>
                <td>Text</td>
                <td>Regression</td>
                <td>Dietary potassium intake</td>
              </tr>
              <tr valign="top">
                <td>Nguyen et al, 2022 [<xref ref-type="bibr" rid="ref37">37</xref>]</td>
                <td>United States</td>
                <td>36</td>
                <td>Adolescents</td>
                <td>Pop-socket</td>
                <td>Image, text</td>
                <td>Regression</td>
                <td>Dietary intake</td>
              </tr>
              <tr valign="top">
                <td>Shao et al, 2023 [<xref ref-type="bibr" rid="ref38">38</xref>]</td>
                <td>China</td>
                <td>5920</td>
                <td>Food images</td>
                <td>RGB-D<sup>ah</sup> fusion network</td>
                <td>Image</td>
                <td>Regression</td>
                <td>Energy, nutrient intake</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table1fn1">
              <p><sup>a</sup>AI: artificial intelligence.</p>
            </fn>
            <fn id="table1fn2">
              <p><sup>b</sup>SVM: support vector machine.</p>
            </fn>
            <fn id="table1fn3">
              <p><sup>c</sup>RBFk: radial basis function kernels.</p>
            </fn>
            <fn id="table1fn4">
              <p><sup>d</sup>RF: random forest.</p>
            </fn>
            <fn id="table1fn5">
              <p><sup>e</sup>CV: computer vision.</p>
            </fn>
            <fn id="table1fn6">
              <p><sup>f</sup>DT: decision tree.</p>
            </fn>
            <fn id="table1fn7">
              <p><sup>g</sup>Not applicable.</p>
            </fn>
            <fn id="table1fn8">
              <p><sup>h</sup>SRM: speech recognition model.</p>
            </fn>
            <fn id="table1fn9">
              <p><sup>i</sup>NLP: natural language processing.</p>
            </fn>
            <fn id="table1fn10">
              <p><sup>j</sup>SMM: string matching module.</p>
            </fn>
            <fn id="table1fn11">
              <p><sup>k</sup>BN: Bayesian network.</p>
            </fn>
            <fn id="table1fn12">
              <p><sup>l</sup>LA: Levenshtein algorithm.</p>
            </fn>
            <fn id="table1fn13">
              <p><sup>m</sup>MTNnet: multi-task neural network.</p>
            </fn>
            <fn id="table1fn14">
              <p><sup>n</sup>DTM: Delaunay triangulation method.</p>
            </fn>
            <fn id="table1fn15">
              <p><sup>o</sup>GAN: generative adversarial network.</p>
            </fn>
            <fn id="table1fn16">
              <p><sup>p</sup>CNN: convolutional neural network.</p>
            </fn>
            <fn id="table1fn17">
              <p><sup>q</sup>LASSO: least absolute shrinkage and selection operator.</p>
            </fn>
            <fn id="table1fn18">
              <p><sup>r</sup>FFNN: feed forward neural network.</p>
            </fn>
            <fn id="table1fn19">
              <p><sup>s</sup>XGB: eXtreme gradient boosting.</p>
            </fn>
            <fn id="table1fn20">
              <p><sup>t</sup>NNC: neural network classifier.</p>
            </fn>
            <fn id="table1fn21">
              <p><sup>u</sup>ANN: artificial neural network.</p>
            </fn>
            <fn id="table1fn22">
              <p><sup>v</sup>MTCNet: multi-task contextual network.</p>
            </fn>
            <fn id="table1fn23">
              <p><sup>w</sup>FSLBC: few-shot learning-based classifier.</p>
            </fn>
            <fn id="table1fn24">
              <p><sup>x</sup>SCA: surface construction algorithm.</p>
            </fn>
            <fn id="table1fn25">
              <p><sup>y</sup>DNN: deep neural network.</p>
            </fn>
            <fn id="table1fn26">
              <p><sup>z</sup>PSPNet: pyramid scene parsing network.</p>
            </fn>
            <fn id="table1fn27">
              <p><sup>aa</sup>ResNet: residual neural network.</p>
            </fn>
            <fn id="table1fn28">
              <p><sup>ab</sup>FCN: fully convolutional neural network.</p>
            </fn>
            <fn id="table1fn29">
              <p><sup>ac</sup>IM: inception network.</p>
            </fn>
            <fn id="table1fn30">
              <p><sup>ad</sup>MLP: multilayer perceptron.</p>
            </fn>
            <fn id="table1fn31">
              <p><sup>ae</sup>LDA: linear discriminant analysis.</p>
            </fn>
            <fn id="table1fn32">
              <p><sup>af</sup>LR: logistic regression.</p>
            </fn>
            <fn id="table1fn33">
              <p><sup>ag</sup>BTANN: Bayesian tree augmented naive network.</p>
            </fn>
            <fn id="table1fn34">
              <p><sup>ah</sup>RGB-D: Red, Green, Blue-Depth.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
        <p>The studies varied in sample sizes, ranging from 10 to 38,415. Specifically, 10 studies had sample sizes between 10 and 99 [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref19">19</xref>-<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref37">37</xref>], 3 had between 100 and 199 [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref34">34</xref>], and the remaining 12 had sample sizes exceeding 300. Among the 25 studies, while all involved human subjects, 10 studies focused on analyzing food images, dish images, or plate images to estimate dietary intake [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref23">23</xref>,<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref29">29</xref>,<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref33">33</xref>,<xref ref-type="bibr" rid="ref38">38</xref>], 4 targeted healthy adults dealing with obesity or overweight [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref21">21</xref>,<xref ref-type="bibr" rid="ref26">26</xref>], 3 focused on adults with normal weight [<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref34">34</xref>], 3 engaged with children and adolescents [<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref35">35</xref>,<xref ref-type="bibr" rid="ref37">37</xref>], and 2 addressed adults with diseases [<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" 
rid="ref36">36</xref>]. Over the years, there have been notable advancements in AI-based dietary assessment. Early studies primarily focused on developing basic image recognition algorithms. More recent studies have integrated advanced machine learning models, such as deep learning and convolutional neural networks, which have significantly improved the accuracy of food recognition and nutrient estimation.</p>
        <p>Among the 25 studies, 10 used image data, 9 used sound or jaw motion data from wearable devices, 4 used text data, and the remaining 2 combined multiple types of input data for dietary assessment. We classified the applications into 4 categories: dietary intake assessment, food detection, nutrient estimation, and food intake prediction.</p>
      </sec>
      <sec>
        <title>Applications in Dietary Intake Assessment</title>
        <p>Our review identified several critical steps involved in the processing of dietary intake assessment systems, specifically for image-based methods. These steps include (1) identifying images with food, (2) identifying the foods, (3) separating the foods into separate parts, (4) estimating portion sizes served and remaining to estimate intake, and (5) estimating nutrient intake. Each of these steps involves distinct AI methodologies with varying degrees of accuracy and potential errors.</p>
        <sec>
          <title>Identifying Images With Food</title>
          <p>AI models, particularly convolutional neural networks (CNNs), are widely used for recognizing the presence of food in images. Studies, such as those by Fang et al (2019) [<xref ref-type="bibr" rid="ref12">12</xref>] and Jia et al (2019) [<xref ref-type="bibr" rid="ref24">24</xref>], have demonstrated high accuracy in detecting food presence using end-to-end image-based automatic food energy estimation techniques and real-world egocentric images, respectively.</p>
        </sec>
        <sec>
          <title>Identifying the Foods</title>
          <p>Once food is identified in an image, the next step is to classify and recognize different food items. Techniques such as support vector machines (SVMs) and deep learning models, including GANs (Generative Adversarial Networks) and advanced CNNs, are used for this purpose. For example, the GoCARB system developed by Anthimopoulos et al [<xref ref-type="bibr" rid="ref18">18</xref>] used computer vision to estimate carbohydrate content by recognizing different food items from smartphone images.</p>
        </sec>
        <sec>
          <title>Separating Foods Into Separate Parts</title>
          <p>Segmenting individual food items within an image is crucial for accurate portion size estimation. Techniques such as image segmentation using deep neural networks (DNNs) have been effective in this regard. The study by Mezgec and Koroušić Seljak [<xref ref-type="bibr" rid="ref28">28</xref>] showcased the use of DNNs for image-based dietary assessment with a classification accuracy of 86.72%.</p>
        </sec>
        <sec>
          <title>Estimating Portion Sizes Served and Remaining</title>
          <p>Estimating the portion sizes of served and remaining food requires precise volume and area measurements, which can be challenging due to varying presentation and occlusion of food items. AI models using RGB-D (Red, Green, Blue-Depth) imagery, as seen in the work by Shao et al [<xref ref-type="bibr" rid="ref38">38</xref>], have shown promise in improving the precision of such estimations by using depth information to enhance the accuracy of food volume assessments.</p>
        </sec>
        <sec>
          <title>Estimating Nutrient Intake</title>
          <p>The final step involves estimating the nutrient intake based on the identified and quantified food items. This step often leverages databases such as the US Department of Agriculture (USDA) nutritional database to map food items to their nutrient profiles. The integration of AI for this purpose is exemplified by systems like the S2NI platform, which combines speech recognition and natural language processing to monitor dietary composition from spoken data, achieving high accuracy in nutrient computation.</p>
          <p>Non–image-based dietary assessment methods, including those using sound, jaw motion from wearable devices, and text analysis, can also be categorized similarly. These methods contribute to various steps, particularly in identifying food intake and estimating nutrient content. For instance, the use of jaw motion signals analyzed by SVMs, as studied by Lopez-Meyer et al [<xref ref-type="bibr" rid="ref16">16</xref>], provides high accuracy in detecting food intake.</p>
        </sec>
      </sec>
      <sec>
        <title>Applications in Food Detection</title>
        <p>Food detection refers to the identification and recognition of food items using AI technologies. AI applications have become increasingly important in automating food detection, providing foundational advancements crucial for accurate nutrient estimation and food intake prediction. SVM and random forests are highlighted as prevalent machine learning models across the studies, aiming to achieve high food detection accuracy [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref19">19</xref>]. Random forest classification emphasizes the importance of time and frequency domain features in food intake detection with wearable sensor systems, focusing predominantly on jaw motion and accelerometer signals [<xref ref-type="bibr" rid="ref17">17</xref>].</p>
        <p>Another essential facet in this AI-infused dietary landscape is the integration of image-based assessments [<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref33">33</xref>]. The development and validation of deep neural networks like NutriNet for food and beverage image recognition have showcased the ability of image-based approaches to identify multiple food or beverage items in a single image. Moreover, incorporating FCNs and deep residual networks (ResNet) magnifies the efficacy of segmenting food images, presenting a robust method in automated dietary assessments. Notably, Pfisterer et al [<xref ref-type="bibr" rid="ref33">33</xref>] offered insights into the application of deep convolutional encoder-decoder food networks with depth-refinement (EDFN-D) in long-term care settings, providing an automated imaging system for quantifying food intake with high precision and objectivity, addressing the existing limitations in these settings [<xref ref-type="bibr" rid="ref33">33</xref>].</p>
        <p>A noticeable trend across the studies is the use of wearable and mobile devices, demonstrating the integration of technology with daily human activities for real-time and accurate data collection [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref37">37</xref>]. Wearable devices, such as the Automatic Ingestion Monitor (AIM) and other novel devices with sensors on the temporalis muscle and accelerometers, have shown potential in reducing the influence of motion artifacts and speech on food intake detection accuracy [<xref ref-type="bibr" rid="ref19">19</xref>]. Furthermore, mobile AI technologies, such as FRANI (Food Recognition Assistance and Nudging Insights), illustrate their feasibility and reliability in resource-constrained settings, offering a comparable alternative to traditional methods like weighed records (WRs) [<xref ref-type="bibr" rid="ref37">37</xref>].</p>
        <p>DNN and CNN are central in recognizing and detecting food items from images, providing an automated approach to food detection and segmentation. The FoodIntech system, using a DNN-based approach, has demonstrated reliability in recognizing a variety of dishes and assessing food consumption [<xref ref-type="bibr" rid="ref32">32</xref>]. Similarly, algorithms designed for egocentric images from wearable cameras have achieved substantial accuracy in food detection, addressing concerns related to data processing burdens and privacy [<xref ref-type="bibr" rid="ref24">24</xref>].</p>
        <p>Combining AI with RGB-D imagery is an evolving approach, showing promise in refining the precision of food nutrition estimation. The use of RGB-D fusion networks has revealed advancements in performing multimodal and multiscale feature fusion, offering a refined accuracy in nutrient analysis [<xref ref-type="bibr" rid="ref38">38</xref>]. This approach successfully estimated calories and mass with a lower percentage mean absolute error and effectively visualized the estimation results of 4 nutrients [<xref ref-type="bibr" rid="ref38">38</xref>].</p>
        <p>Despite the advancements, there is a discernable disparity in the reported accuracy and reliability among the studies, with accuracy ranging from 74% to 99.85% [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref24">24</xref>]. This variance reflects the diverse methodologies, sensor modalities, ML algorithms, and the nature of features extracted for analysis. The ongoing refinements in methods and technologies showcase the evolving nature of AI applications in food detection, signaling a step forward in automating dietary assessment in varied environments and demographic settings.</p>
      </sec>
      <sec>
        <title>Applications in Nutrient Estimation</title>
        <p>AI has been used to address the challenges associated with accurate nutrient intake assessment and dietary management for various medical conditions and patient demographics. The GoCARB system [<xref ref-type="bibr" rid="ref18">18</xref>] exemplifies how AI can assist individuals with type 1 diabetes in carbohydrate counting, using computer vision to automate the estimation process using smartphones, hence aiding in optimal insulin dosage estimations. This application relies on the segmentation and recognition of food items, calculating the carbohydrate content based on food volumes and the USDA nutritional database, demonstrating a mean absolute percentage error in carbohydrate estimation of approximately 10%.</p>
        <p>In addressing the nutrition assessment needs of hospitalized patients, an AI-based system has been developed [<xref ref-type="bibr" rid="ref23">23</xref>,<xref ref-type="bibr" rid="ref29">29</xref>] that uses RGB-D image pairs to estimate nutrient intake. These applications offer a means to counter malnutrition risks in hospital settings by delivering more accurate and automated nutrient intake assessments. The systems segment images into different food components, estimate the volume consumed, and calculate energy and macronutrient intake, showing a 15% estimation error [<xref ref-type="bibr" rid="ref23">23</xref>] and improved agreement with expert estimations compared to standard clinical procedures [<xref ref-type="bibr" rid="ref29">29</xref>].</p>
        <p>Efforts have also been made to estimate food energy values using GAN architecture [<xref ref-type="bibr" rid="ref12">12</xref>]. By mapping food images to their energy distributions, the technology has shown promise in improving the accuracy of dietary assessments, with an average error of 209 kcal per eating occasion in a real-world study setting.</p>
        <p>In the context of 24-hour food recalls, machine learning models and database matching have been instrumental in estimating nutrients not directly outputted by specific dietary assessment tools [<xref ref-type="bibr" rid="ref25">25</xref>]. For instance, lactose was relatively accurately estimated using models like XGB regressor and database matching methods.</p>
        <p>Meanwhile, studies on the interplay between behavioral and physiologic variables in predicting food intake [<xref ref-type="bibr" rid="ref34">34</xref>] have provided foundational insights. However, the predictive capability of combined or separate measures of food reward or biometric responses has not outperformed traditional models in clinical settings. The approach, however, lays the groundwork for further exploration of behavioral nutrition and personalized nutrition strategies.</p>
        <p>Furthermore, the development of predictive tools leveraging AI for patients with chronic kidney disease has exhibited the potential to estimate dietary potassium intake, emphasizing the role of AI in clinical and therapeutic management [<xref ref-type="bibr" rid="ref36">36</xref>]. This application has been noteworthy for its ability to classify potassium diet in 3 classes of potassium excretion with 74% accuracy, focusing more on clinical characteristics and renal pathology than on the potassium content of the ingested food.</p>
        <p>Using mobile platforms that incorporate speech and natural language processing to convert spoken data to nutrient information offers a lens into the transformative potential of voice-based solutions [<xref ref-type="bibr" rid="ref20">20</xref>,<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref30">30</xref>]. These solutions, such as S2NI, Speech2Health, and the COCO Nutritionist app, achieve substantial accuracy in computing calorie intake, emphasizing the importance of real-time and pervasive monitoring. They demonstrate an integrated approach to capture dietary information more frequently, revealing the user preference toward voice-based interfaces over text-based and image-based nutrition monitoring due to their ease of use and accessibility.</p>
      </sec>
      <sec>
        <title>Applications in Food Intake Prediction</title>
        <p>Food intake prediction involves estimating the amount and type of food consumed based on detected items. Advancements in AI are significantly shaping the landscape of food intake prediction by offering various innovative solutions and techniques. For instance, ML techniques in predicting dietary lapses during weight loss interventions have demonstrated the potential to augment adherence to dietary guidelines and offer real-time interventions, providing a comprehensive perspective on combining individual and group-level data to enrich predictions [<xref ref-type="bibr" rid="ref21">21</xref>].</p>
        <p>The adaptability and efficiency of ML are further highlighted in the studies focusing on detecting food intake using various sensor technologies and algorithms. Developing and validating sensor-based food intake detection methods, such as AIM, have illustrated high accuracy and reliability, presenting a promising future for food intake monitoring in unconstrained environments [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref31">31</xref>]. SVMs have been effectively used in monitoring ingestive behavior, yielding up to 94% accuracy in detecting food intake by analyzing chews and swallows [<xref ref-type="bibr" rid="ref16">16</xref>].</p>
        <p>In particular, the utility of DL algorithms, like ResNet and Fully Convolutional Neural Network (FCN), is revealed to be paramount in differentiating food intake from other activities using sensor signals. The competitive performance of these algorithms indicates the significance of selecting appropriate methods for precise classifications in real-world scenarios, establishing their importance in the evolving field of dietary monitoring and health interventions [<xref ref-type="bibr" rid="ref31">31</xref>].</p>
        <p>The exploration of DNN in automatic food intake detection through dynamic analysis of heart rate variability has opened avenues for addressing meal-related disorders. The notable accuracy of DNN, especially in neuromodulation treatments for conditions like obesity and diabetes, establishes the potential of ML in contributing to varied health care settings [<xref ref-type="bibr" rid="ref27">27</xref>].</p>
        <p>Furthermore, the studies using ML algorithms like the random forest have provided a robust method for identifying and comparing nutritional risk, offering valuable insights into developing targeted nutritional interventions and effectively addressing undernutrition. Such approaches are crucial in considering local dietary culture and delivering more nuanced and culturally competent health care solutions [<xref ref-type="bibr" rid="ref35">35</xref>].</p>
        <p>Another essential facet in this AI-infused dietary landscape is the integration of image-based assessments [<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref33">33</xref>]. The development and validation of deep neural networks like NutriNet for food and beverage image recognition have showcased the ability of image-based approaches to identify multiple food or beverage items in a single image. Furthermore, incorporating FCNs and deep residual networks (ResNet) magnifies the efficacy of segmenting food images, presenting a robust method in automated dietary assessments. Notably, Pfisterer et al [<xref ref-type="bibr" rid="ref33">33</xref>] offered insights into the application of deep convolutional EDFN-D in long-term care settings, providing an automated imaging system for quantifying food intake with high precision and objectivity, addressing the existing limitations in these settings [<xref ref-type="bibr" rid="ref33">33</xref>].</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Findings</title>
        <p>The increasing intersection of AI with dietary assessment has emerged as a transformative trend, as evidenced by our scoping review. Our literature search revealed 25 pertinent studies published between 2010 and 2023. These studies spanned several nations, diverse demographics, and a spectrum of methodologies. At its core, AI has primarily been used in 4 domains: dietary intake assessment, food detection, nutrient estimation, and food intake prediction. Machine learning models like SVMs and random forests and deep learning models like CNNs have proved instrumental in enhancing the accuracy of food detection and nutrient estimation, often integrated with wearable devices and mobile platforms. Another observation was the use of AI in designing user-friendly interfaces, such as voice-based inputs, to improve adherence to dietary tracking. User experience with AI-based dietary assessment tools varies, but studies indicate generally positive feedback regarding ease of use and convenience. Users appreciate the real-time feedback and reduced burden of manual input. However, there are concerns about accuracy and privacy. Enhanced user training and transparent data privacy policies could improve user trust and interaction with these tools. The collective findings underscore the potential of AI to revolutionize dietary assessment, providing robust accuracy and user-centric solutions. This amalgamation of technology and nutrition research addresses the inherent limitations of traditional methods and charts a path for more personalized, accurate, and real-time dietary assessments in varied settings.</p>
        <p>As illustrated by the reviewed studies, integrating AI into food and nutrient intake assessments showcases a marked advancement over traditional methodologies commonly used in nutritional science [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref37">37</xref>]. Historically, methods such as 24-hour recalls, food frequency questionnaires, and dietary records have been the mainstay of dietary assessments [<xref ref-type="bibr" rid="ref2">2</xref>]. While these methods have provided invaluable insights, they have inherent limitations like recall bias, inaccuracies stemming from self-reporting, and the logistical challenges of frequent, detailed data recording [<xref ref-type="bibr" rid="ref3">3</xref>]. The reviewed studies, however, highlighted the significant potential of AI to alleviate some of these concerns. For instance, AI-backed systems such as FRANI have been shown to offer a reliable alternative to weighed records, which, although thorough, can be burdensome for participants [<xref ref-type="bibr" rid="ref37">37</xref>]. Similarly, tools like the GoCARB system automate carbohydrate counting, which, if done manually, demands meticulous attention and can be prone to errors, especially for individuals with conditions like diabetes [<xref ref-type="bibr" rid="ref18">18</xref>].</p>
        <p>Furthermore, the versatility of AI applications across various nutritional assessments is evident from the reviewed literature. For instance, SVMs and random forests, when deployed in monitoring ingestive behaviors, have demonstrated high accuracy in detecting food intake by analyzing nuances such as chews and swallows [<xref ref-type="bibr" rid="ref16">16</xref>]. This level of precision is difficult to attain through manual observation or self-reports. Applying DNNs to recognize food items from images underscores another leap, automating a process that traditionally demands human expertise. Furthermore, the intersection of AI with RGB-D imagery suggests an improved accuracy in nutrient analysis, an area where traditional methods may not always yield precise results [<xref ref-type="bibr" rid="ref38">38</xref>]. However, it is crucial to note the variability in reported accuracy among studies, which underscores the importance of refining methodologies and recognizing the evolving nature of AI applications. Despite this, the current trajectory indicates that AI is poised to bring a paradigm shift in automating dietary assessment, melding accuracy with efficiency [<xref ref-type="bibr" rid="ref36">36</xref>,<xref ref-type="bibr" rid="ref37">37</xref>]. Wearable technology that detects food intake based on chews and swallows offers significant benefits in real-time dietary monitoring, particularly in clinical and research settings. These devices can be integrated with mobile applications and other wearable sensors to provide comprehensive dietary assessments. While continuous camera use may not be practical for all users, advancements in discreet wearable sensors and intermittent image capture can enhance user compliance and accuracy.</p>
        <p>While AI’s promise in food and nutrient intake measurement is evident, its application comes with intrinsic challenges and limitations. The reviewed studies, as well as the broader literature, highlight some consistent concerns. First, the AI models heavily depend on the quality and breadth of training data [<xref ref-type="bibr" rid="ref39">39</xref>]. A model trained on a limited dataset may not recognize diverse food items, particularly those from various global cuisines or those prepared using unique methods [<xref ref-type="bibr" rid="ref40">40</xref>]. This can lead to inaccuracies in nutrient estimation. Common biases include algorithmic biases resulting from non-diverse training datasets that fail to represent global food diversity. In addition, limitations in image-based recognition systems often stem from varying image quality and presentation, which can affect the accuracy of food and nutrient estimations. The variability in food presentation, portion sizes, and the physical environment in which the food is captured (eg, lighting conditions) can pose challenges for image-based recognition systems [<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>]. Furthermore, while tools like FRANI and GoCARB show promise, they also underscore the current limitations in recognizing mixed dishes or deciphering layered foods with multiple ingredients [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref37">37</xref>]. It is also worth noting that AI systems, while reducing human biases, introduce computational biases that may arise from algorithmic designs or training datasets [<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref44">44</xref>]. These challenges highlight the need for more comprehensive datasets and improved image processing techniques to enhance AI model reliability.
Finally, a potential digital divide exists, where populations without access to advanced technology or those not adept at using it might be excluded from AI-based dietary assessments, thereby limiting its universal applicability [<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref46">46</xref>].</p>
        <p>Many AI-based dietary assessment tools rely on dietitians to validate and estimate dietary intake from images due to the complexities involved in accurate food identification and portion size estimation. With the constant addition of new food items, maintaining up-to-date nutrient databases is challenging. Some studies have focused narrowly on estimating energy intake or working with a limited set of foods under controlled conditions, which limits the generalizability of their findings. Future research should focus on developing scalable AI models that can handle a broader range of foods and integrate real-time updates to nutrient databases. In addition, enhancing the collaboration between AI technologies and dietitians can help improve the accuracy and applicability of these tools.</p>
        <p>Current objective methods face significant limitations, including inaccuracies in nutrient composition tables, the complexity of multi-ingredient dishes, and variability in nutrient composition of commercially available foods. In addition, these methods do not account for individual metabolic differences in nutrient processing. Integrating biological sensors with AI technologies could offer a more definitive approach by providing real-time data on circulating nutrients and individual metabolic responses, thereby improving the accuracy of dietary assessments.</p>
        <p>The sequential nature of AI-based dietary assessment introduces cumulative errors, where inaccuracies at each stage—from food detection to nutrient estimation—can compound, leading to significant overall errors. Biological sensors that measure circulating nutrients in real-time offer a promising solution to overcome these limitations, as they provide direct data on nutrient absorption and metabolism, reducing reliance on intermediate estimations and improving overall accuracy.</p>
        <p>Our search strategy, while comprehensive, may not have captured all studies involving AI and dietary assessment. Despite significant advancements, several gaps remain in the application of AI for dietary assessment. Future research should focus on enhancing the diversity of training datasets to reduce algorithmic biases and improve the accuracy of AI models in recognizing a wide variety of food items. In addition, integrating real-time metabolic data with dietary assessments could offer more comprehensive insights into individual nutritional statuses. Among the AI tools evaluated, image-based recognition systems like the GoCARB system are highly effective for carbohydrate counting in diabetes management, while wearable devices monitoring jaw motion offer promising real-time intake data, particularly useful in clinical settings.</p>
        <p>Ethical considerations in AI-based dietary assessment are paramount. Data privacy concerns arise from the extensive personal data required for accurate assessments, necessitating robust security measures and transparent consent processes. Algorithmic biases can lead to inaccuracies and unfair outcomes, highlighting the need for diverse training datasets. In addition, the digital divide poses a significant challenge, as populations without access to advanced technologies may be excluded from the benefits of AI. Addressing these issues requires comprehensive strategies, including inclusive technology design and stringent ethical standards in data handling and algorithm development.</p>
        <p>As AI continues to evolve, there is vast potential for revolutionary enhancements in dietary and nutrient intake measurement. Based on current trajectories in nutrition science and AI advancements, we might anticipate a future where AI systems can recognize food items with high precision and factor in variables like cooking methods, regional variations, and the bioavailability of nutrients. These AI systems could be trained on increasingly diverse datasets, capturing the nuances of global diets and potentially integrating real-time metabolic and physiological data from wearable devices to provide a more comprehensive view of an individual’s nutrient absorption [<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref48">48</xref>]. AI could facilitate large-scale dietary assessment studies on a population level, helping researchers discern dietary patterns, nutrient deficiencies, and even epidemiological correlations faster and more accurately [<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref50">50</xref>]. With the rise of precision nutrition, AI might enable personalized dietary recommendations, considering an individual's genetic, metabolic, and health profile [<xref ref-type="bibr" rid="ref51">51</xref>]. This tailored approach could radically improve disease management, particularly for conditions like diabetes or cardiovascular diseases, where dietary interventions play a pivotal role [<xref ref-type="bibr" rid="ref52">52</xref>].</p>
      </sec>
      <sec>
        <title>Conclusion</title>
        <p>In conclusion, the scoping review highlighted the burgeoning role of AI in advancing the measurement of food and nutrient intakes, with notable advancements in accuracy and efficiency compared to traditional methods. However, while the potential of AI in this domain is substantial, it is imperative to acknowledge its current limitations and areas requiring refinement. As the nexus between nutrition science and technology continues to strengthen, future research must focus on refining AI methodologies, ensuring their applicability across diverse populations, and integrating them into broader nutritional and health studies. This interdisciplinary collaboration promises a future where dietary assessments are accurate and instrumental in shaping individual and public health outcomes.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) checklist (Note that our paper is a scoping review rather than a systematic review, so some criteria in the checklist may not apply and are thus omitted).</p>
        <media xlink:href="jmir_v26i1e54557_app1.pdf" xlink:title="PDF File  (Adobe PDF File), 80 KB"/>
      </supplementary-material>
      <supplementary-material id="app2">
        <label>Multimedia Appendix 2</label>
        <p>Database search algorithms.</p>
        <media xlink:href="jmir_v26i1e54557_app2.docx" xlink:title="DOCX File , 15 KB"/>
      </supplementary-material>
      <supplementary-material id="app3">
        <label>Multimedia Appendix 3</label>
        <p>Main findings in the studies included in the review.</p>
        <media xlink:href="jmir_v26i1e54557_app3.docx" xlink:title="DOCX File , 21 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AI</term>
          <def>
            <p>artificial intelligence</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">AIM</term>
          <def>
            <p>Automatic Ingestion Monitor</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">CNN</term>
          <def>
            <p>convolutional neural network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">DNN</term>
          <def>
            <p>deep neural network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">EDFN-D</term>
          <def>
            <p>encoder-decoder food networks with depth-refinement</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">FCN</term>
          <def>
            <p>fully convolutional network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">FRANI</term>
          <def>
            <p>Food Recognition Assistance and Nudging Insights</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb8">GAN</term>
          <def>
            <p>generative adversarial network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb9">PRISMA</term>
          <def>
            <p>Preferred Reporting Items for Systematic Reviews and Meta-Analyses</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb10">PRISMA-ScR</term>
          <def>
            <p>Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb11">ResNet</term>
          <def>
            <p>residual networks</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb12">RGB-D</term>
          <def>
            <p>Red, Green, Blue-Depth</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb13">SVM</term>
          <def>
            <p>support vector machine</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb14">USDA</term>
          <def>
            <p>US Department of Agriculture</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb15">WR</term>
          <def>
            <p>weighed record</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This research received no external funding.</p>
    </ack>
    <notes>
      <sec>
        <title>Data Availability</title>
        <p>Data sharing is not applicable to this article as no datasets were generated or analyzed during this study.</p>
      </sec>
    </notes>
    <fn-group>
      <fn fn-type="con">
        <p>JZ and RA contributed to conceptualization. JW contributed to the methodology. JS handled the software. JZ, JW, and JS performed validation. JW and JS conducted the formal analysis. JZ and RA conducted the investigation. RA handled the resources. JW and JS performed data curation. JZ and RA contributed to writing—original draft preparation. JW and JS contributed to writing—review and editing. JW performed visualization. RA performed supervision. RA contributed to project administration.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kirkpatrick</surname>
              <given-names>SI</given-names>
            </name>
            <name name-style="western">
              <surname>Collins</surname>
              <given-names>CE</given-names>
            </name>
          </person-group>
          <article-title>Assessment of nutrient intakes: introduction to the special issue</article-title>
          <source>Nutrients</source>
          <year>2016</year>
          <volume>8</volume>
          <issue>4</issue>
          <fpage>184</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu8040184"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu8040184</pub-id>
          <pub-id pub-id-type="medline">27023598</pub-id>
          <pub-id pub-id-type="pii">nu8040184</pub-id>
          <pub-id pub-id-type="pmcid">PMC4848653</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shim</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Oh</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>HC</given-names>
            </name>
          </person-group>
          <article-title>Dietary assessment methods in epidemiologic studies</article-title>
          <source>Epidemiol Health</source>
          <year>2014</year>
          <volume>36</volume>
          <fpage>e2014009</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/25078382"/>
          </comment>
          <pub-id pub-id-type="doi">10.4178/epih/e2014009</pub-id>
          <pub-id pub-id-type="medline">25078382</pub-id>
          <pub-id pub-id-type="pii">epih/e2014009</pub-id>
          <pub-id pub-id-type="pmcid">PMC4154347</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ravelli</surname>
              <given-names>MN</given-names>
            </name>
            <name name-style="western">
              <surname>Schoeller</surname>
              <given-names>DA</given-names>
            </name>
          </person-group>
          <article-title>Traditional self-reported dietary instruments are prone to inaccuracies and new approaches are needed</article-title>
          <source>Front Nutr</source>
          <year>2020</year>
          <volume>7</volume>
          <fpage>90</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/32719809"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fnut.2020.00090</pub-id>
          <pub-id pub-id-type="medline">32719809</pub-id>
          <pub-id pub-id-type="pmcid">PMC7350526</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hebert</surname>
              <given-names>J R</given-names>
            </name>
            <name name-style="western">
              <surname>Clemow</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Pbert</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Ockene</surname>
              <given-names>IS</given-names>
            </name>
            <name name-style="western">
              <surname>Ockene</surname>
              <given-names>JK</given-names>
            </name>
          </person-group>
          <article-title>Social desirability bias in dietary self-report may compromise the validity of dietary intake measures</article-title>
          <source>Int J Epidemiol</source>
          <year>1995</year>
          <volume>24</volume>
          <issue>2</issue>
          <fpage>389</fpage>
          <lpage>398</lpage>
          <pub-id pub-id-type="doi">10.1093/ije/24.2.389</pub-id>
          <pub-id pub-id-type="medline">7635601</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Masterton</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hardman</surname>
              <given-names>CA</given-names>
            </name>
            <name name-style="western">
              <surname>Boyland</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Robinson</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Makin</surname>
              <given-names>HE</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Are commonly used lab-based measures of food value and choice predictive of self-reported real-world snacking? An ecological momentary assessment study</article-title>
          <source>Br J Health Psychol</source>
          <year>2023</year>
          <volume>28</volume>
          <issue>1</issue>
          <fpage>237</fpage>
          <lpage>251</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/36000399"/>
          </comment>
          <pub-id pub-id-type="doi">10.1111/bjhp.12622</pub-id>
          <pub-id pub-id-type="medline">36000399</pub-id>
          <pub-id pub-id-type="pmcid">PMC10086796</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jobarteh</surname>
              <given-names>ML</given-names>
            </name>
            <name name-style="western">
              <surname>McCrory</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Lo</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Sazonov</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Anderson</surname>
              <given-names>AK</given-names>
            </name>
            <name name-style="western">
              <surname>Jia</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Maitland</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Qiu</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Steiner-Asiedu</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Higgins</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Baranowski</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Olupot-Olupot</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Frost</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Development and validation of an objective, passive dietary assessment method for estimating food and nutrient intake in households in low- and middle-income countries: a study protocol</article-title>
          <source>Curr Dev Nutr</source>
          <year>2020</year>
          <volume>4</volume>
          <issue>2</issue>
          <fpage>nzaa020</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2475-2991(22)11981-5"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/cdn/nzaa020</pub-id>
          <pub-id pub-id-type="medline">32099953</pub-id>
          <pub-id pub-id-type="pii">S2475-2991(22)11981-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC7031207</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Oliveira Chaves</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Gomes Domingos</surname>
              <given-names>AL</given-names>
            </name>
            <name name-style="western">
              <surname>Louzada Fernandes</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Ribeiro Cerqueira</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Siqueira-Batista</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Bressan</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Applicability of machine learning techniques in food intake assessment: a systematic review</article-title>
          <source>Crit Rev Food Sci Nutr</source>
          <year>2023</year>
          <volume>63</volume>
          <issue>7</issue>
          <fpage>902</fpage>
          <lpage>919</lpage>
          <pub-id pub-id-type="doi">10.1080/10408398.2021.1956425</pub-id>
          <pub-id pub-id-type="medline">34323627</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Collins</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Dennehy</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Conboy</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Mikalef</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Artificial intelligence in information systems research: a systematic literature review and research agenda</article-title>
          <source>International Journal of Information Management</source>
          <year>2021</year>
          <volume>60</volume>
          <fpage>102383</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.ijinfomgt.2021.102383"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.ijinfomgt.2021.102383</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Schork</surname>
              <given-names>NJ</given-names>
            </name>
          </person-group>
          <article-title>Artificial intelligence and personalized medicine</article-title>
          <source>Cancer Treat Res</source>
          <year>2019</year>
          <volume>178</volume>
          <fpage>265</fpage>
          <lpage>283</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/31209850"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/978-3-030-16391-4_11</pub-id>
          <pub-id pub-id-type="medline">31209850</pub-id>
          <pub-id pub-id-type="pmcid">PMC7580505</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hosny</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Parmar</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Quackenbush</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Schwartz</surname>
              <given-names>LH</given-names>
            </name>
            <name name-style="western">
              <surname>Aerts</surname>
              <given-names>HJWL</given-names>
            </name>
          </person-group>
          <article-title>Artificial intelligence in radiology</article-title>
          <source>Nat Rev Cancer</source>
          <year>2018</year>
          <volume>18</volume>
          <issue>8</issue>
          <fpage>500</fpage>
          <lpage>510</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/29777175"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41568-018-0016-5</pub-id>
          <pub-id pub-id-type="medline">29777175</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41568-018-0016-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC6268174</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sudo</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Murasaki</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Kinebuchi</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Kimura</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Waki</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Machine learning-based screening of healthy meals from image analysis: system development and pilot study</article-title>
          <source>JMIR Form Res</source>
          <year>2020</year>
          <volume>4</volume>
          <issue>10</issue>
          <fpage>e18507</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://formative.jmir.org/2020/10/e18507/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/18507</pub-id>
          <pub-id pub-id-type="medline">33104010</pub-id>
          <pub-id pub-id-type="pii">v4i10e18507</pub-id>
          <pub-id pub-id-type="pmcid">PMC7652690</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Shao</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Kerr</surname>
              <given-names>DA</given-names>
            </name>
            <name name-style="western">
              <surname>Boushey</surname>
              <given-names>CJ</given-names>
            </name>
            <name name-style="western">
              <surname>Zhu</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>An end-to-end image-based automatic food energy estimation technique based on learned energy distribution images: protocol and methodology</article-title>
          <source>Nutrients</source>
          <year>2019</year>
          <volume>11</volume>
          <issue>4</issue>
          <fpage>877</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu11040877"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu11040877</pub-id>
          <pub-id pub-id-type="medline">31003547</pub-id>
          <pub-id pub-id-type="pii">nu11040877</pub-id>
          <pub-id pub-id-type="pmcid">PMC6521161</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Folson</surname>
              <given-names>GK</given-names>
            </name>
            <name name-style="western">
              <surname>Bannerman</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Atadze</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Ador</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Kolt</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>McCloskey</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Gangupantulu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Arrieta</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Braga</surname>
              <given-names>BC</given-names>
            </name>
            <name name-style="western">
              <surname>Arsenault</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Kehs</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Doyle</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Tran</surname>
              <given-names>LM</given-names>
            </name>
            <name name-style="western">
              <surname>Hoang</surname>
              <given-names>NT</given-names>
            </name>
            <name name-style="western">
              <surname>Hughes</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Nguyen</surname>
              <given-names>PH</given-names>
            </name>
            <name name-style="western">
              <surname>Gelli</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Validation of mobile artificial intelligence technology-assisted dietary assessment tool against weighed records and 24-hour recall in adolescent females in Ghana</article-title>
          <source>J Nutr</source>
          <year>2023</year>
          <volume>153</volume>
          <issue>8</issue>
          <fpage>2328</fpage>
          <lpage>2338</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0022-3166(23)72406-1"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.tjnut.2023.06.001</pub-id>
          <pub-id pub-id-type="medline">37276939</pub-id>
          <pub-id pub-id-type="pii">S0022-3166(23)72406-1</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lu</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Stathopoulou</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Vasiloglou</surname>
              <given-names>MF</given-names>
            </name>
            <name name-style="western">
              <surname>Christodoulidis</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Stanga</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Mougiakakou</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>An artificial intelligence-based system to assess nutrient intake for hospitalised patients</article-title>
          <source>IEEE Trans Multimedia</source>
          <year>2021</year>
          <volume>23</volume>
          <fpage>1136</fpage>
          <lpage>1147</lpage>
          <pub-id pub-id-type="doi">10.1109/tmm.2020.2993948</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tricco</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Lillie</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Zarin</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>O'Brien</surname>
              <given-names>KK</given-names>
            </name>
            <name name-style="western">
              <surname>Colquhoun</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Levac</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Moher</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Peters</surname>
              <given-names>MDJ</given-names>
            </name>
            <name name-style="western">
              <surname>Horsley</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Weeks</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Hempel</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Akl</surname>
              <given-names>EA</given-names>
            </name>
            <name name-style="western">
              <surname>Chang</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>McGowan</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Stewart</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Hartling</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Aldcroft</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Wilson</surname>
              <given-names>MG</given-names>
            </name>
            <name name-style="western">
              <surname>Garritty</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Lewin</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Godfrey</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Macdonald</surname>
              <given-names>MT</given-names>
            </name>
            <name name-style="western">
              <surname>Langlois</surname>
              <given-names>EV</given-names>
            </name>
            <name name-style="western">
              <surname>Soares-Weiser</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Moriarty</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Clifford</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Tunçalp</surname>
              <given-names>Ö</given-names>
            </name>
            <name name-style="western">
              <surname>Straus</surname>
              <given-names>SE</given-names>
            </name>
          </person-group>
          <article-title>PRISMA extension for scoping reviews (PRISMA-ScR): checklist and explanation</article-title>
          <source>Ann Intern Med</source>
          <year>2018</year>
          <volume>169</volume>
          <issue>7</issue>
          <fpage>467</fpage>
          <lpage>473</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.acpjournals.org/doi/abs/10.7326/M18-0850?url_ver=Z39.88-2003&#38;rfr_id=ori:rid:crossref.org&#38;rfr_dat=cr_pub%20%200pubmed"/>
          </comment>
          <pub-id pub-id-type="doi">10.7326/M18-0850</pub-id>
          <pub-id pub-id-type="medline">30178033</pub-id>
          <pub-id pub-id-type="pii">2700389</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lopez-Meyer</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Schuckers</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Makeyev</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Sazonov</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Detection of periods of food intake using Support Vector Machines</article-title>
          <source>Annu Int Conf IEEE Eng Med Biol Soc</source>
          <year>2010</year>
          <volume>2010</volume>
          <fpage>1004</fpage>
          <lpage>1007</lpage>
          <pub-id pub-id-type="doi">10.1109/IEMBS.2010.5627796</pub-id>
          <pub-id pub-id-type="medline">21096991</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fontana</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Farooq</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Sazonov</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Estimation of feature importance for food intake detection based on random forests classification</article-title>
          <source>Annu Int Conf IEEE Eng Med Biol Soc</source>
          <year>2013</year>
          <volume>2013</volume>
          <fpage>6756</fpage>
          <lpage>6759</lpage>
          <pub-id pub-id-type="doi">10.1109/EMBC.2013.6611107</pub-id>
          <pub-id pub-id-type="medline">24111294</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Anthimopoulos</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Dehais</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Shevchik</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ransford</surname>
              <given-names>BH</given-names>
            </name>
            <name name-style="western">
              <surname>Duke</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Diem</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Mougiakakou</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Computer vision-based carbohydrate estimation for type 1 patients with diabetes using smartphones</article-title>
          <source>J Diabetes Sci Technol</source>
          <year>2015</year>
          <volume>9</volume>
          <issue>3</issue>
          <fpage>507</fpage>
          <lpage>515</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/25883163"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/1932296815580159</pub-id>
          <pub-id pub-id-type="medline">25883163</pub-id>
          <pub-id pub-id-type="pii">1932296815580159</pub-id>
          <pub-id pub-id-type="pmcid">PMC4604531</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Farooq</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Sazonov</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>A novel wearable device for food intake and physical activity recognition</article-title>
          <source>Sensors (Basel)</source>
          <year>2016</year>
          <volume>16</volume>
          <issue>7</issue>
          <fpage>1067</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s16071067"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s16071067</pub-id>
          <pub-id pub-id-type="medline">27409622</pub-id>
          <pub-id pub-id-type="pii">s16071067</pub-id>
          <pub-id pub-id-type="pmcid">PMC4970114</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hezarjaribi</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Reynolds</surname>
              <given-names>CA</given-names>
            </name>
            <name name-style="western">
              <surname>Miller</surname>
              <given-names>DT</given-names>
            </name>
            <name name-style="western">
              <surname>Chaytor</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Ghasemzadeh</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>S2NI: a mobile platform for nutrition monitoring from spoken data</article-title>
          <source>Annu Int Conf IEEE Eng Med Biol Soc</source>
          <year>2016</year>
          <volume>2016</volume>
          <fpage>1991</fpage>
          <lpage>1994</lpage>
          <pub-id pub-id-type="doi">10.1109/EMBC.2016.7591115</pub-id>
          <pub-id pub-id-type="medline">28268720</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Goldstein</surname>
              <given-names>SP</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Thomas</surname>
              <given-names>JG</given-names>
            </name>
            <name name-style="western">
              <surname>Butryn</surname>
              <given-names>ML</given-names>
            </name>
            <name name-style="western">
              <surname>Herbert</surname>
              <given-names>JD</given-names>
            </name>
            <name name-style="western">
              <surname>Forman</surname>
              <given-names>EM</given-names>
            </name>
          </person-group>
          <article-title>Application of machine learning to predict dietary lapses during weight loss</article-title>
          <source>J Diabetes Sci Technol</source>
          <year>2018</year>
          <volume>12</volume>
          <issue>5</issue>
          <fpage>1045</fpage>
          <lpage>1052</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/29792067"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/1932296818775757</pub-id>
          <pub-id pub-id-type="medline">29792067</pub-id>
          <pub-id pub-id-type="pmcid">PMC6134608</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hezarjaribi</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Mazrouee</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ghasemzadeh</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Speech2Health: a mobile framework for monitoring dietary composition from spoken data</article-title>
          <source>IEEE J Biomed Health Inform</source>
          <year>2018</year>
          <volume>22</volume>
          <issue>1</issue>
          <fpage>252</fpage>
          <lpage>264</lpage>
          <pub-id pub-id-type="doi">10.1109/JBHI.2017.2709333</pub-id>
          <pub-id pub-id-type="medline">29300701</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lu</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Stathopoulou</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Vasiloglou</surname>
              <given-names>MF</given-names>
            </name>
            <name name-style="western">
              <surname>Christodoulidis</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Blum</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Walser</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Meier</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Stanga</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Mougiakakou</surname>
              <given-names>SG</given-names>
            </name>
          </person-group>
          <article-title>An artificial intelligence-based system for nutrient intake assessment of hospitalised patients</article-title>
          <source>Annu Int Conf IEEE Eng Med Biol Soc</source>
          <year>2019</year>
          <volume>2019</volume>
          <fpage>5696</fpage>
          <lpage>5699</lpage>
          <pub-id pub-id-type="doi">10.1109/EMBC.2019.8856889</pub-id>
          <pub-id pub-id-type="medline">31947145</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jia</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Qu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Baranowski</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Burke</surname>
              <given-names>LE</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Bai</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Mancino</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Xu</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Mao</surname>
              <given-names>ZH</given-names>
            </name>
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Automatic food detection in egocentric images using artificial intelligence technology</article-title>
          <source>Public Health Nutr</source>
          <year>2019</year>
          <volume>22</volume>
          <issue>7</issue>
          <fpage>1168</fpage>
          <lpage>1179</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/29576027"/>
          </comment>
          <pub-id pub-id-type="doi">10.1017/S1368980018000538</pub-id>
          <pub-id pub-id-type="medline">29576027</pub-id>
          <pub-id pub-id-type="pii">S1368980018000538</pub-id>
          <pub-id pub-id-type="pmcid">PMC8114337</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chin</surname>
              <given-names>EL</given-names>
            </name>
            <name name-style="western">
              <surname>Simmons</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bouzid</surname>
              <given-names>YY</given-names>
            </name>
            <name name-style="western">
              <surname>Kan</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Burnett</surname>
              <given-names>DJ</given-names>
            </name>
            <name name-style="western">
              <surname>Tagkopoulos</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Lemay</surname>
              <given-names>DG</given-names>
            </name>
          </person-group>
          <article-title>Nutrient estimation from 24-hour food recalls using machine learning and database mapping: a case study with lactose</article-title>
          <source>Nutrients</source>
          <year>2019</year>
          <volume>11</volume>
          <issue>12</issue>
          <fpage>3045</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu11123045"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu11123045</pub-id>
          <pub-id pub-id-type="medline">31847188</pub-id>
          <pub-id pub-id-type="pii">nu11123045</pub-id>
          <pub-id pub-id-type="pmcid">PMC6950225</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Farooq</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Doulah</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Parton</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>McCrory</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Higgins</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Sazonov</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Validation of sensor-based food intake detection by multicamera video observation in an unconstrained environment</article-title>
          <source>Nutrients</source>
          <year>2019</year>
          <volume>11</volume>
          <issue>3</issue>
          <fpage>609</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu11030609"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu11030609</pub-id>
          <pub-id pub-id-type="medline">30871173</pub-id>
          <pub-id pub-id-type="pii">nu11030609</pub-id>
          <pub-id pub-id-type="pmcid">PMC6472006</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Heremans</surname>
              <given-names>ERM</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>AS</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Cheng</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Xu</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Martinez</surname>
              <given-names>AE</given-names>
            </name>
            <name name-style="western">
              <surname>Lazaridis</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Van Huffel</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>JDZ</given-names>
            </name>
          </person-group>
          <article-title>Artificial neural network-based automatic detection of food intake for neuromodulation in treating obesity and diabetes</article-title>
          <source>Obes Surg</source>
          <year>2020</year>
          <volume>30</volume>
          <issue>7</issue>
          <fpage>2547</fpage>
          <lpage>2557</lpage>
          <pub-id pub-id-type="doi">10.1007/s11695-020-04511-6</pub-id>
          <pub-id pub-id-type="medline">32103435</pub-id>
          <pub-id pub-id-type="pii">10.1007/s11695-020-04511-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mezgec</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Koroušić Seljak</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Deep neural networks for image-based dietary assessment</article-title>
          <source>J Vis Exp</source>
          <year>2021</year>
          <issue>169</issue>
          <fpage>e61906</fpage>
          <pub-id pub-id-type="doi">10.3791/61906</pub-id>
          <pub-id pub-id-type="medline">33779595</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Papathanail</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Brühlmann</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Vasiloglou</surname>
              <given-names>MF</given-names>
            </name>
            <name name-style="western">
              <surname>Stathopoulou</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Exadaktylos</surname>
              <given-names>AK</given-names>
            </name>
            <name name-style="western">
              <surname>Stanga</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Münzer</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Mougiakakou</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Evaluation of a novel artificial intelligence system to monitor and assess energy and macronutrient intake in hospitalised older patients</article-title>
          <source>Nutrients</source>
          <year>2021</year>
          <volume>13</volume>
          <issue>12</issue>
          <fpage>4539</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://boris.unibe.ch/id/eprint/163630"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu13124539</pub-id>
          <pub-id pub-id-type="medline">34960091</pub-id>
          <pub-id pub-id-type="pii">nu13124539</pub-id>
          <pub-id pub-id-type="pmcid">PMC8706142</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Taylor</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Korpusik</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Das</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Gilhooly</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Simpson</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Glass</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Roberts</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Use of natural spoken language with automated mapping of self-reported food intake to food composition data for low-burden real-time dietary assessment: method comparison study</article-title>
          <source>J Med Internet Res</source>
          <year>2021</year>
          <volume>23</volume>
          <issue>12</issue>
          <fpage>e26988</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2021/12/e26988/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/26988</pub-id>
          <pub-id pub-id-type="medline">34874885</pub-id>
          <pub-id pub-id-type="pii">v23i12e26988</pub-id>
          <pub-id pub-id-type="pmcid">PMC8691405</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ghosh</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Sazonov</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>A comparative study of deep learning algorithms for detecting food intake</article-title>
          <source>Annu Int Conf IEEE Eng Med Biol Soc</source>
          <year>2022</year>
          <volume>2022</volume>
          <fpage>2993</fpage>
          <lpage>2996</lpage>
          <pub-id pub-id-type="doi">10.1109/EMBC48229.2022.9871278</pub-id>
          <pub-id pub-id-type="medline">36085821</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Van Wymelbeke-Delannoy</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Juhel</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Bole</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Sow</surname>
              <given-names>AK</given-names>
            </name>
            <name name-style="western">
              <surname>Guyot</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Belbaghdadi</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Brousse</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Paindavoine</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>A cross-sectional reproducibility study of a standard camera sensor using artificial intelligence to assess food items: the FoodIntech project</article-title>
          <source>Nutrients</source>
          <year>2022</year>
          <volume>14</volume>
          <issue>1</issue>
          <fpage>221</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu14010221"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu14010221</pub-id>
          <pub-id pub-id-type="medline">35011096</pub-id>
          <pub-id pub-id-type="pii">nu14010221</pub-id>
          <pub-id pub-id-type="pmcid">PMC8747564</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pfisterer</surname>
              <given-names>KJ</given-names>
            </name>
            <name name-style="western">
              <surname>Amelard</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Chung</surname>
              <given-names>AG</given-names>
            </name>
            <name name-style="western">
              <surname>Syrnyk</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>MacLean</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Keller</surname>
              <given-names>HH</given-names>
            </name>
            <name name-style="western">
              <surname>Wong</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Automated food intake tracking requires depth-refined semantic segmentation to rectify visual-volume discordance in long-term care homes</article-title>
          <source>Sci Rep</source>
          <year>2022</year>
          <volume>12</volume>
          <issue>1</issue>
          <fpage>83</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1038/s41598-021-03972-8"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-021-03972-8</pub-id>
          <pub-id pub-id-type="medline">34997022</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-021-03972-8</pub-id>
          <pub-id pub-id-type="pmcid">PMC8742067</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pedersen</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Diaz</surname>
              <given-names>LJ</given-names>
            </name>
            <name name-style="western">
              <surname>Clemmensen</surname>
              <given-names>KKB</given-names>
            </name>
            <name name-style="western">
              <surname>Jensen</surname>
              <given-names>MM</given-names>
            </name>
            <name name-style="western">
              <surname>Jørgensen</surname>
              <given-names>ME</given-names>
            </name>
            <name name-style="western">
              <surname>Finlayson</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Quist</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Vistisen</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Færch</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Predicting food intake from food reward and biometric responses to food cues in adults with normal weight using machine learning</article-title>
          <source>J Nutr</source>
          <year>2022</year>
          <volume>152</volume>
          <issue>6</issue>
          <fpage>1574</fpage>
          <lpage>1581</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0022-3166(22)00652-6"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/jn/nxac053</pub-id>
          <pub-id pub-id-type="medline">35325189</pub-id>
          <pub-id pub-id-type="pii">S0022-3166(22)00652-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Siy Van</surname>
              <given-names>VT</given-names>
            </name>
            <name name-style="western">
              <surname>Antonio</surname>
              <given-names>VA</given-names>
            </name>
            <name name-style="western">
              <surname>Siguin</surname>
              <given-names>CP</given-names>
            </name>
            <name name-style="western">
              <surname>Gordoncillo</surname>
              <given-names>NP</given-names>
            </name>
            <name name-style="western">
              <surname>Sescon</surname>
              <given-names>JT</given-names>
            </name>
            <name name-style="western">
              <surname>Go</surname>
              <given-names>CC</given-names>
            </name>
            <name name-style="western">
              <surname>Miro</surname>
              <given-names>EP</given-names>
            </name>
          </person-group>
          <article-title>Predicting undernutrition among elementary schoolchildren in the Philippines using machine learning algorithms</article-title>
          <source>Nutrition</source>
          <year>2022</year>
          <volume>96</volume>
          <fpage>111571</fpage>
          <pub-id pub-id-type="doi">10.1016/j.nut.2021.111571</pub-id>
          <pub-id pub-id-type="medline">35063809</pub-id>
          <pub-id pub-id-type="pii">S0899-9007(21)00433-0</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Granal</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Slimani</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Florens</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Sens</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Pelletier</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Pszczolinski</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Casiez</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Kalbacher</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Jolivot</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Dubourg</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Lemoine</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Pasian</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Ducher</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Fauvel</surname>
              <given-names>JP</given-names>
            </name>
          </person-group>
          <article-title>Prediction tool to estimate potassium diet in chronic kidney disease patients developed using a machine learning tool: the UniverSel study</article-title>
          <source>Nutrients</source>
          <year>2022</year>
          <volume>14</volume>
          <issue>12</issue>
          <fpage>2419</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu14122419"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu14122419</pub-id>
          <pub-id pub-id-type="medline">35745151</pub-id>
          <pub-id pub-id-type="pii">nu14122419</pub-id>
          <pub-id pub-id-type="pmcid">PMC9228360</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Nguyen</surname>
              <given-names>PH</given-names>
            </name>
            <name name-style="western">
              <surname>Tran</surname>
              <given-names>LM</given-names>
            </name>
            <name name-style="western">
              <surname>Hoang</surname>
              <given-names>NT</given-names>
            </name>
            <name name-style="western">
              <surname>Trương</surname>
              <given-names>DTT</given-names>
            </name>
            <name name-style="western">
              <surname>Tran</surname>
              <given-names>THT</given-names>
            </name>
            <name name-style="western">
              <surname>Huynh</surname>
              <given-names>PN</given-names>
            </name>
            <name name-style="western">
              <surname>Koch</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>McCloskey</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Gangupantulu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Folson</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bannerman</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Arrieta</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Braga</surname>
              <given-names>BC</given-names>
            </name>
            <name name-style="western">
              <surname>Arsenault</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Kehs</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Doyle</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Hughes</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Gelli</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Relative validity of a mobile AI-technology-assisted dietary assessment in adolescent females in Vietnam</article-title>
          <source>Am J Clin Nutr</source>
          <year>2022</year>
          <volume>116</volume>
          <issue>4</issue>
          <fpage>992</fpage>
          <lpage>1001</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0002-9165(23)03617-1"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/ajcn/nqac216</pub-id>
          <pub-id pub-id-type="medline">35945309</pub-id>
          <pub-id pub-id-type="pii">S0002-9165(23)03617-1</pub-id>
          <pub-id pub-id-type="pmcid">PMC9535545</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shao</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Min</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Hou</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Zheng</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Jiang</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Vision-based food nutrition estimation via RGB-D fusion network</article-title>
          <source>Food Chem</source>
          <year>2023</year>
          <volume>424</volume>
          <fpage>136309</fpage>
          <pub-id pub-id-type="doi">10.1016/j.foodchem.2023.136309</pub-id>
          <pub-id pub-id-type="medline">37207601</pub-id>
          <pub-id pub-id-type="pii">S0308-8146(23)00927-5</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Aldoseri</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Al-Khalifa</surname>
              <given-names>KN</given-names>
            </name>
            <name name-style="western">
              <surname>Hamouda</surname>
              <given-names>AM</given-names>
            </name>
          </person-group>
          <article-title>Re-Thinking data strategy and integration for artificial intelligence: concepts, opportunities, and challenges</article-title>
          <source>Applied Sciences</source>
          <year>2023</year>
          <volume>13</volume>
          <issue>12</issue>
          <fpage>7082</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.3390/app13127082"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/app13127082</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mezgec</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Eftimov</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Bucher</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Koroušić Seljak</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Mixed deep learning and natural language processing method for fake-food image recognition and standardization to help automated dietary assessment</article-title>
          <source>Public Health Nutr</source>
          <year>2019</year>
          <volume>22</volume>
          <issue>7</issue>
          <fpage>1193</fpage>
          <lpage>1202</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/29623869"/>
          </comment>
          <pub-id pub-id-type="doi">10.1017/S1368980018000708</pub-id>
          <pub-id pub-id-type="medline">29623869</pub-id>
          <pub-id pub-id-type="pii">S1368980018000708</pub-id>
          <pub-id pub-id-type="pmcid">PMC6536832</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Allegra</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Battiato</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ortis</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Urso</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Polosa</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>A review on food recognition technology for health applications</article-title>
          <source>Health Psychol Res</source>
          <year>2020</year>
          <volume>8</volume>
          <issue>3</issue>
          <fpage>9297</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/33553793"/>
          </comment>
          <pub-id pub-id-type="doi">10.4081/hpr.2020.9297</pub-id>
          <pub-id pub-id-type="medline">33553793</pub-id>
          <pub-id pub-id-type="pmcid">PMC7859960</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tahir</surname>
              <given-names>GA</given-names>
            </name>
            <name name-style="western">
              <surname>Loo</surname>
              <given-names>CK</given-names>
            </name>
          </person-group>
          <article-title>A comprehensive survey of image-based food recognition and volume estimation methods for dietary assessment</article-title>
          <source>Healthcare (Basel)</source>
          <year>2021</year>
          <volume>9</volume>
          <issue>12</issue>
          <fpage>1676</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=healthcare9121676"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/healthcare9121676</pub-id>
          <pub-id pub-id-type="medline">34946400</pub-id>
          <pub-id pub-id-type="pii">healthcare9121676</pub-id>
          <pub-id pub-id-type="pmcid">PMC8700885</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Detopoulou</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Voulgaridou</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Moschos</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Levidi</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Anastasiou</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Dedes</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Diplari</surname>
              <given-names>EM</given-names>
            </name>
            <name name-style="western">
              <surname>Fourfouri</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Giaginis</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Panoutsopoulos</surname>
              <given-names>GI</given-names>
            </name>
            <name name-style="western">
              <surname>Papadopoulou</surname>
              <given-names>SK</given-names>
            </name>
          </person-group>
          <article-title>Artificial intelligence, nutrition, and ethical issues: a mini-review</article-title>
          <source>Clinical Nutrition Open Science</source>
          <year>2023</year>
          <volume>50</volume>
          <fpage>46</fpage>
          <lpage>56</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.nutos.2023.07.001"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.nutos.2023.07.001</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Belenguer</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>AI bias: exploring discriminatory algorithmic decision-making models and the application of possible machine-centric solutions adapted from the pharmaceutical industry</article-title>
          <source>AI Ethics</source>
          <year>2022</year>
          <volume>2</volume>
          <issue>4</issue>
          <fpage>771</fpage>
          <lpage>787</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/35194591"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s43681-022-00138-8</pub-id>
          <pub-id pub-id-type="medline">35194591</pub-id>
          <pub-id pub-id-type="pii">138</pub-id>
          <pub-id pub-id-type="pmcid">PMC8830968</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Papathanail</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Abdur Rahman</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Brigato</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Bez</surname>
              <given-names>NS</given-names>
            </name>
            <name name-style="western">
              <surname>Vasiloglou</surname>
              <given-names>MF</given-names>
            </name>
            <name name-style="western">
              <surname>van der Horst</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Mougiakakou</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>The nutritional content of meal images in free-living conditions&#8212;automatic assessment with goFOOD</article-title>
          <source>Nutrients</source>
          <year>2023</year>
          <volume>15</volume>
          <issue>17</issue>
          <fpage>3835</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://boris.unibe.ch/id/eprint/186190"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu15173835</pub-id>
          <pub-id pub-id-type="medline">37686866</pub-id>
          <pub-id pub-id-type="pii">nu15173835</pub-id>
          <pub-id pub-id-type="pmcid">PMC10490087</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhou</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Research on the influence mechanism and governance mechanism of digital divide for the elderly on wisdom healthcare: the role of artificial intelligence and big data</article-title>
          <source>Front Public Health</source>
          <year>2022</year>
          <volume>10</volume>
          <fpage>837238</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/36062111"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fpubh.2022.837238</pub-id>
          <pub-id pub-id-type="medline">36062111</pub-id>
          <pub-id pub-id-type="pmcid">PMC9428348</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Romero-Tapiador</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Lacruz-Pleguezuelos</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Tolosana</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Freixer</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Daza</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Fernández-Díaz</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Aguilar-Aguilar</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Fernández-Cabezas</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Cruz-Gil</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Molina</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Crespo</surname>
              <given-names>MC</given-names>
            </name>
            <name name-style="western">
              <surname>Laguna</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Marcos-Zambrano</surname>
              <given-names>LJ</given-names>
            </name>
            <name name-style="western">
              <surname>Vera-Rodriguez</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Fierrez</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ramírez de Molina</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Ortega-Garcia</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Espinosa-Salinas</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Morales</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Carrillo de Santa Pau</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>AI4FoodDB: a database for personalized e-Health nutrition and lifestyle through wearable devices and artificial intelligence</article-title>
          <source>Database (Oxford)</source>
          <year>2023</year>
          <volume>2023</volume>
          <fpage>baad049</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/37465917"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/database/baad049</pub-id>
          <pub-id pub-id-type="medline">37465917</pub-id>
          <pub-id pub-id-type="pii">7226275</pub-id>
          <pub-id pub-id-type="pmcid">PMC10354505</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shei</surname>
              <given-names>RJ</given-names>
            </name>
            <name name-style="western">
              <surname>Holder</surname>
              <given-names>IG</given-names>
            </name>
            <name name-style="western">
              <surname>Oumsang</surname>
              <given-names>AS</given-names>
            </name>
            <name name-style="western">
              <surname>Paris</surname>
              <given-names>BA</given-names>
            </name>
            <name name-style="western">
              <surname>Paris</surname>
              <given-names>HL</given-names>
            </name>
          </person-group>
          <article-title>Wearable activity trackers&#8212;advanced technology or advanced marketing?</article-title>
          <source>Eur J Appl Physiol</source>
          <year>2022</year>
          <volume>122</volume>
          <issue>9</issue>
          <fpage>1975</fpage>
          <lpage>1990</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/35445837"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s00421-022-04951-1</pub-id>
          <pub-id pub-id-type="medline">35445837</pub-id>
          <pub-id pub-id-type="pii">10.1007/s00421-022-04951-1</pub-id>
          <pub-id pub-id-type="pmcid">PMC9022022</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sak</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Suchodolska</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Artificial intelligence in nutrients science research: a review</article-title>
          <source>Nutrients</source>
          <year>2021</year>
          <volume>13</volume>
          <issue>2</issue>
          <fpage>322</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu13020322"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu13020322</pub-id>
          <pub-id pub-id-type="medline">33499405</pub-id>
          <pub-id pub-id-type="pii">nu13020322</pub-id>
          <pub-id pub-id-type="pmcid">PMC7911928</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref50">
        <label>50</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kirk</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Kok</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Tufano</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Tekinerdogan</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Feskens</surname>
              <given-names>EJM</given-names>
            </name>
            <name name-style="western">
              <surname>Camps</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Machine learning in nutrition research</article-title>
          <source>Adv Nutr</source>
          <year>2022</year>
          <volume>13</volume>
          <issue>6</issue>
          <fpage>2573</fpage>
          <lpage>2589</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2161-8313(23)00092-3"/>
          </comment>
          <pub-id pub-id-type="doi">10.1093/advances/nmac103</pub-id>
          <pub-id pub-id-type="medline">36166846</pub-id>
          <pub-id pub-id-type="pii">S2161-8313(23)00092-3</pub-id>
          <pub-id pub-id-type="pmcid">PMC9776646</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref51">
        <label>51</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>de Toro-Martín</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Arsenault</surname>
              <given-names>BJ</given-names>
            </name>
            <name name-style="western">
              <surname>Després</surname>
              <given-names>JP</given-names>
            </name>
            <name name-style="western">
              <surname>Vohl</surname>
              <given-names>MC</given-names>
            </name>
          </person-group>
          <article-title>Precision nutrition: a review of personalized nutritional approaches for the prevention and management of metabolic syndrome</article-title>
          <source>Nutrients</source>
          <year>2017</year>
          <volume>9</volume>
          <issue>8</issue>
          <fpage>913</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=nu9080913"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/nu9080913</pub-id>
          <pub-id pub-id-type="medline">28829397</pub-id>
          <pub-id pub-id-type="pii">nu9080913</pub-id>
          <pub-id pub-id-type="pmcid">PMC5579706</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref52">
        <label>52</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Livingstone</surname>
              <given-names>KM</given-names>
            </name>
            <name name-style="western">
              <surname>Ramos-Lopez</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Pérusse</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Kato</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Ordovas</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Martínez</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>Precision nutrition: a review of current approaches and future endeavors</article-title>
          <source>Trends Food Sci Technol</source>
          <year>2022</year>
          <volume>128</volume>
          <fpage>253</fpage>
          <lpage>264</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.tifs.2022.08.017"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.tifs.2022.08.017</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
