<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
    <article-id pub-id-type="publisher-id">v20i11e11144</article-id>
    <article-id pub-id-type="pmid">30429111</article-id>
    <article-id pub-id-type="doi">10.2196/11144</article-id>
    <article-categories>
      <subj-group subj-group-type="heading">
        <subject>Original Paper</subject>
      </subj-group>
      <subj-group subj-group-type="article-type">
        <subject>Original Paper</subject>
      </subj-group>
    </article-categories>
    <title-group>
      <article-title>An Interpretable and Expandable Deep Learning Diagnostic System for Multiple Ocular Diseases: Qualitative Study</article-title>
    </title-group>
    <contrib-group>
      <contrib contrib-type="editor">
        <name>
          <surname>Eysenbach</surname>
          <given-names>Gunther</given-names>
        </name>
      </contrib>
    </contrib-group>
    <contrib-group>
      <contrib contrib-type="reviewer">
        <name>
          <surname>Banf</surname>
          <given-names>Michael</given-names>
        </name>
      </contrib>
      <contrib contrib-type="reviewer">
        <name>
          <surname>Zhang</surname>
          <given-names>Liangliang</given-names>
        </name>
      </contrib>
      <contrib contrib-type="reviewer">
        <name>
          <surname>Pan</surname>
          <given-names>Qiong</given-names>
        </name>
      </contrib>
    </contrib-group>
    <contrib-group>
      <contrib contrib-type="author" id="contrib1">
        <name name-style="western">
          <surname>Zhang</surname>
          <given-names>Kai</given-names>
        </name>
        <degrees>MS</degrees>
        <xref rid="aff1" ref-type="aff">1</xref>
        <xref rid="aff2" ref-type="aff">2</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0001-9054-288X</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib2">
        <name name-style="western">
          <surname>Liu</surname>
          <given-names>Xiyang</given-names>
        </name>
        <degrees>PhD</degrees>
        <xref rid="aff1" ref-type="aff">1</xref>
        <xref rid="aff3" ref-type="aff">3</xref>
        <xref rid="aff4" ref-type="aff">4</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0001-5214-3677</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib3">
        <name name-style="western">
          <surname>Liu</surname>
          <given-names>Fan</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff3" ref-type="aff">3</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0002-9627-1125</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib4">
        <name name-style="western">
          <surname>He</surname>
          <given-names>Lin</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff1" ref-type="aff">1</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0003-0588-4098</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib5">
        <name name-style="western">
          <surname>Zhang</surname>
          <given-names>Lei</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff1" ref-type="aff">1</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0002-6428-3287</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib6">
        <name name-style="western">
          <surname>Yang</surname>
          <given-names>Yahan</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff2" ref-type="aff">2</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0003-3870-1235</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib7">
        <name name-style="western">
          <surname>Li</surname>
          <given-names>Wangting</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff2" ref-type="aff">2</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0001-7070-5768</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib8">
        <name name-style="western">
          <surname>Wang</surname>
          <given-names>Shuai</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff3" ref-type="aff">3</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0003-2751-0812</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib9">
        <name name-style="western">
          <surname>Liu</surname>
          <given-names>Lin</given-names>
        </name>
        <degrees>BSc</degrees>
        <xref rid="aff1" ref-type="aff">1</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0003-2940-9830</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib10">
        <name name-style="western">
          <surname>Liu</surname>
          <given-names>Zhenzhen</given-names>
        </name>
        <degrees>MD, PhD</degrees>
        <xref rid="aff2" ref-type="aff">2</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0002-4853-2474</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib11">
        <name name-style="western">
          <surname>Wu</surname>
          <given-names>Xiaohang</given-names>
        </name>
        <degrees>MD, MPH</degrees>
        <xref rid="aff2" ref-type="aff">2</xref>
        <ext-link ext-link-type="orcid">http://orcid.org/0000-0002-9398-4330</ext-link>
      </contrib>
      <contrib contrib-type="author" id="contrib12" corresp="yes">
      <name name-style="western">
        <surname>Lin</surname>
        <given-names>Haotian</given-names>
      </name>
      <degrees>MD, PhD</degrees>
      <xref rid="aff2" ref-type="aff">2</xref>
      <address>
        <institution>State Key Laboratory of Ophthalmology</institution>
        <institution>Zhongshan Ophthalmic Center</institution>
        <institution>Sun Yat-sen University</institution>
        <addr-line>54 Xian Lie South Road</addr-line>
        <addr-line>Guangzhou</addr-line>
        <country>China</country>
        <phone>86 (020)87330</phone>
        <email>haot.lin@hotmail.com</email>
      </address>  
      <ext-link ext-link-type="orcid">http://orcid.org/0000-0003-4672-9721</ext-link></contrib>
    </contrib-group>
    <aff id="aff1">
    <label>1</label>
    <institution>School of Computer Science and Technology</institution>
    <institution>Xidian University</institution>  
    <addr-line>Xi'an</addr-line>
    <country>China</country></aff>
    <aff id="aff2">
    <label>2</label>
    <institution>State Key Laboratory of Ophthalmology</institution>
    <institution>Zhongshan Ophthalmic Center</institution>  
    <institution>Sun Yat-sen University</institution>  
    <addr-line>Guangzhou</addr-line>
    <country>China</country></aff>
    <aff id="aff3">
    <label>3</label>
    <institution>School of Software</institution>
    <institution>Xidian University</institution>  
    <addr-line>Xi'an</addr-line>
    <country>China</country></aff>
    <aff id="aff4">
    <label>4</label>
    <institution>Institute of Software Engineering</institution>
    <institution>Xidian University</institution>  
    <addr-line>Xi'an</addr-line>
    <country>China</country></aff>
    <author-notes>
      <corresp>Corresponding Author: Haotian Lin 
      <email>haot.lin@hotmail.com</email></corresp>
    </author-notes>
    <pub-date pub-type="collection"><month>11</month><year>2018</year></pub-date>
    <pub-date pub-type="epub">
      <day>14</day>
      <month>11</month>
      <year>2018</year>
    </pub-date>
    <volume>20</volume>
    <issue>11</issue>
    <elocation-id>e11144</elocation-id>
    <!--history from ojs - api-xml-->
    <history>
      <date date-type="received">
        <day>26</day>
        <month>5</month>
        <year>2018</year>
      </date>
      <date date-type="rev-request">
        <day>12</day>
        <month>7</month>
        <year>2018</year>
      </date>
      <date date-type="rev-recd">
        <day>31</day>
        <month>7</month>
        <year>2018</year>
      </date>
      <date date-type="accepted">
        <day>2</day>
        <month>8</month>
        <year>2018</year>
      </date>
    </history>
    <!--(c) the authors - correct author names and publication date here if necessary. Date in form ', dd.mm.yyyy' after jmir.org-->
    <copyright-statement>©Kai Zhang, Xiyang Liu, Fan Liu, Lin He, Lei Zhang, Yahan Yang, Wangting Li, Shuai Wang, Lin Liu, Zhenzhen Liu, Xiaohang Wu, Haotian Lin. Originally published in the Journal of Medical Internet Research (http://www.jmir.org), 14.11.2018.</copyright-statement>
    <copyright-year>2018</copyright-year>
    <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
      <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research, is properly cited. The complete bibliographic information, a link to the original publication on http://www.jmir.org/, as well as this copyright and license information must be included.</p>
    </license>  
    <self-uri xlink:href="http://www.jmir.org/2018/11/e11144/" xlink:type="simple"/>
    <abstract>
      <sec sec-type="background">
        <title>Background</title>
        <p>Although artificial intelligence performs promisingly in medicine, few automatic disease diagnosis platforms can clearly explain why a specific medical decision is made.</p>
      </sec>
      <sec sec-type="objective">
        <title>Objective</title>
        <p>We aimed to devise and develop an interpretable and expandable diagnosis framework for automatically diagnosing multiple ocular diseases and providing treatment recommendations for the particular illness of a specific patient.</p>
      </sec>
      <sec sec-type="methods">
        <title>Methods</title>
        <p>As the diagnosis of ocular diseases highly depends on observing medical images, we chose ophthalmic images as research material. All medical images were labeled to 4 types of diseases or normal (total 5 classes); each image was decomposed into different parts according to the anatomical knowledge and then annotated. This process yields the positions and primary information on different anatomical parts and foci observed in medical images, thereby bridging the gap between medical image and diagnostic process. Next, we applied images and the information produced during the annotation process to implement an interpretable and expandable automatic diagnostic framework with deep learning.</p>
      </sec>
      <sec sec-type="results">
        <title>Results</title>
        <p>This diagnosis framework comprises 4 stages. The first stage identifies the type of disease (identification accuracy, 93%). The second stage localizes the anatomical parts and foci of the eye (localization accuracy: images under natural light without fluorescein sodium eye drops, 82%; images under cobalt blue light or natural light with fluorescein sodium eye drops, 90%). The third stage carefully classifies the specific condition of each anatomical part or focus with the result from the second stage (average accuracy for multiple classification problems, 79%-98%). The last stage provides treatment advice according to medical experience and artificial intelligence, which is merely involved with pterygium (accuracy, &#62;95%). Based on this, we developed a telemedical system that can show detailed reasons for a particular diagnosis to doctors and patients to help doctors with medical decision making. This system can carefully analyze medical images and provide treatment advices according to the analysis results and consultation between a doctor and a patient.</p>
      </sec>
      <sec sec-type="conclusions">
        <title>Conclusions</title>
        <p>The interpretable and expandable medical artificial intelligence platform was successfully built; this system can identify the disease, distinguish different anatomical parts and foci, discern the diagnostic information relevant to the diagnosis of diseases, and provide treatment suggestions. During this process, the whole diagnostic flow becomes clear and understandable to both doctors and their patients. Moreover, other diseases can be seamlessly integrated into this system without any influence on existing modules or diseases. Furthermore, this framework can assist in the clinical training of junior doctors. Owing to the rare high-grade medical resource, it is impossible that everyone receives high-quality professional diagnosis and treatment service. This framework can not only be applied in hospitals with insufficient medical resources to decrease the pressure on experienced doctors but also deployed in remote areas to help doctors diagnose common ocular diseases.</p>
      </sec>
    </abstract>
    <kwd-group>
      <kwd>deep learning</kwd>
      <kwd>object localization</kwd>
      <kwd>multiple ocular diseases</kwd>
      <kwd>interpretable and expandable diagnosis framework</kwd>
      <kwd>making medical decisions</kwd>
    </kwd-group></article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Although there have been many artificial intelligence-based automatic diagnostic platforms, the diagnostic results produced by such computer systems cannot be easily understood. Artificial intelligence that obtains diagnostic results from the computational perspective cannot provide the reason that is depicted as clinical practice for a given diagnosis. Some researchers have attempted to make the conclusion obtained from artificial intelligence methods explainable, such as Raccuglia et al used a decision tree to understand the classification result from the support vector machine [<xref ref-type="bibr" rid="ref1">1</xref>]. Hazlett et al used a deep belief network, a reverse trackable neural network, to find diagnostic evidence of autism [<xref ref-type="bibr" rid="ref2">2</xref>]. Zhou et al used the output of the last full-connected layer of the convolution neural network to infer which part of an image causes the final classification result, which also provides the evidence of classification [<xref ref-type="bibr" rid="ref3">3</xref>]. In addition, Zeiler et al used occlusion test to study which parts of images produce a given classification result [<xref ref-type="bibr" rid="ref4">4</xref>]. These studies made great achievements in explainable artificial intelligence, but readily explainable automatic diagnostic systems are still rare. The primary cause is that these explainable methods did not explain their result according to human thought patterns. Therefore, this research aims to make additional progress based on previous studies.</p>
      <p>There are many existing works about the automatic diagnosis of different types of diseases with medical imaging, but all these works are isolated; they cannot regard all diseases shown in a specific format of medical images from a unified perspective, which is common in natural image processing and practical medical scenes. On the other hand, once all diseases are regarded in a unified way, the extensibility for integrating other types of medical imaging or diseases will be easy. The diagnosis of ophthalmic diseases is highly dependent on observing medical images, so this work selected ophthalmic images that represent multiple ocular diseases as material and treated them with a consistent view. Of note, the unified automatic diagnostic procedure is the simulation of the work flow of doctors. An explainable artificial intelligence-based automatic diagnosis platform offers many advantages. First, it can increase the confidence in the diagnostic results. Second, it assists doctors in refining their diagnostic thinking. Third, it helps medical students deepen their medical knowledge. Finally, it can clear a path toward diagnosing higher numbers of diseases from a unified perspective.</p>
      <p>Besides, doctors can diagnose diseases by observing medical images, but doctors from many specialties and subspecialties cannot tackle all diseases. If a patient suffers from more than one type of disease, the system can tackle these diseases simultaneously. This work plans to integrate the experience of doctors from many subspecialties to construct an omnipotent ophthalmologist.</p>
      <p>Thus, to create an explainable automatic diagnostic system with artificial intelligence, we simulated the workflow of doctors to help artificial intelligence follow the patterns of human thought. This research aims to apply artificial intelligence techniques to fully simulate the diagnostic process of doctors so that reasons for a given diagnosis can be illustrated directly to doctors and patients.</p>
      <p>In this research, we designed an interpretable and expandable framework for multiple ocular diseases. There are 4 stages in this diagnostic framework: primary classification of disease, detection of anatomical parts and foci, judging the conditions of anatomical parts and foci, and providing treatment recommendations. The accuracies of all stages surpass 93%, 82%-87%, 79%-98%, and 95%, respectively. Not only is this system an interpretable diagnostic tool for doctors and patients but it also facilitates the accumulation of medical knowledge for medical students. Moreover, this system can be enriched to cover more ophthalmic diseases or more diseases of other specialties to provide more services following the workflow of doctors. Telemedicine [<xref ref-type="bibr" rid="ref5">5</xref>] can combine medical experts and patients at a considerably low cost. This research develops an interpretable and expandable telemedical artificial intelligence diagnostic system, which can also effectively improve the undesirable condition that high-quality medical resources are inadequate and unevenly distributed. Finally, the health level of people all over the world and the medical condition of underdeveloped countries can be improved with the help of a computer network.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Data Preparation</title>
        <p>Data are important for data-driven research [<xref ref-type="bibr" rid="ref6">6</xref>]. The dataset is examined by all members of our team. Besides, we developed some programs to facilitate the examination of data. All images were collected in the Sun Yat-sen University Zhongshan Ophthalmic Center, which is the leading ophthalmic hospital in China [<xref ref-type="bibr" rid="ref7">7</xref>]. In order to simulate the experience and diagnostic process of doctors, all images were segmented into several parts according to anatomical knowledge or diagnostic experiences and, then, were annotated. Next, multiple attributes of all parts were classified as the actual states of these parts (including foci). All the relevant aspects of the data (images, coordinates of each part, and the attribute information) were used to train an artificial intelligence system. This data preparation process can not only help simulate the diagnostic process of doctors but also facilitate many follow-up studies such as medical image segmentation, clinical experience mining, and integration of refined diagnosing of multiple diseases.</p>
        <p>We collected 1513 images that can be classified into 5 classes (normal, pterygium [<xref ref-type="bibr" rid="ref9">9</xref>], keratitis [<xref ref-type="bibr" rid="ref10">10</xref>], subconjunctival hemorrhage [<xref ref-type="bibr" rid="ref11">11</xref>], and cataract [<xref ref-type="bibr" rid="ref12">12</xref>]). <xref ref-type="fig" rid="figure1">Figure 1</xref> lists the number of images of each class. Furthermore, the examples of objects to be detected in images are shown in <xref ref-type="fig" rid="figure2">Figure 2</xref>; for fundus images (the last row), the localized objects include an artery (blue), vein (green), the macula (black), the optic disc (light purple), hard exudate (yellow), and so on. For other types of images, the objects to be localized include the eyelid (red), eyelash (green), keratitis focus (yellow), cornea and iris zone with keratitis (pink), the pupil zone (blue), conjunctiva and sclera zone with hyperemia (orange), the conjunctiva and sclera zone with edema (light blue), the conjunctiva and sclera zone with hemorrhage (brown), the pupil zone with cataracts (white), the slit arc of the cornea (black), cornea and iris zone (dark green), the conjunctiva and sclera zone (purple), pterygium (gray), the slit arc of keratitis focus (dark red), and the slit arc of the iris (light brown). <xref ref-type="table" rid="table1">Table 1</xref> lists the detailed diagnostic attributes to be classified, and each item of diagnostic information corresponds to a classification problem. The diagnostic information in <xref ref-type="table" rid="table1">Table 1</xref> corresponds to stage 3 (see Methods). This information is essential and fundamental for diagnosing and providing treatment advice and will be determined in stage 3 of the interpretable artificial intelligence system (see Methods). 
All information (object annotation and diagnostic information) was annotated in a double-blind manner by the annotation team, which consisted of 5 experienced ophthalmic doctors and 20 medical students. The annotation of fundus images was completed; however, the experiments on fundus images were not finished. Because of the intrinsic characteristics of the fundus image, the output of the annotation method for fundus images is suitable for semantic segmentation.</p>
      </sec>
      <sec>
        <title>Methodology</title>
        <p>The framework consists of 4 functional stages as follows: (1) judging the class of disease, a preliminary diagnosis that is completed with the original image without any processing; (2) detecting each part of the image, the localization of anatomical parts and foci, which is used to discern different parts with different appearances so that more careful checking can be guaranteed; (3) classifying the attributes of each part, severity and illness assessment, which is closely connected to the second stage, is used to determine the condition of the illness; and (4) providing treatment advice according to the results from the first, second, and third stages; note that the treatment advice for pterygium is from artificial intelligence, whereas the treatment advice for other diseases is from the experience of doctors. First, the disease is primarily identified during stage 1. Second, all anatomical parts and foci are localized during stage 2, and important parts (cornea and iris zone with keratitis and pterygium) are segmented for the analysis in stage 3. Then, the attributes of all anatomical parts and foci are determined during stage 3. Thereafter, the treatment advice is provided in stage 4. The whole process imitates the diagnostic procedure of doctors so that the reasons for a given diagnosis can be tracked and used to construct an evidence-based diagnostic report. Finally, treatment advice can be provided according to the full workflow presented above. <xref ref-type="fig" rid="figure3">Figure 3</xref> shows the flowchart of this system. The analysis of fundus images will be integrated into this system soon, following the same approach used for existing images. The first, second, and third functions are fully based on artificial intelligence, which is trained with the dataset; the fourth function is dependent on both artificial intelligence and the experience of doctors.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>Information of image dataset.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>Examples of each object in terms of each type of disease or normal eye.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig2.PNG" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Detailed diagnostic information regarding the dataset.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="100"/>
            <col width="300"/>
            <col width="300"/>
            <col width="300"/>
            <thead>
              <tr valign="top">
                <td>Disease</td>
                <td>Diagnostic information (Number of classification problems)</td>
                <td>Values of diagnostic information</td>
                <td>Type of image</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Pterygium</td>
                <td><list list-type="order"><list-item><p>Whether the body of the pterygium is hypertrophied</p></list-item><list-item><p>Whether pseudo pterygium is present</p></list-item><list-item><p>Whether the head of the pterygium is uplifted</p></list-item><list-item><p>Whether the head and body of the pterygium is hyperemic</p></list-item><list-item><p>Whether the pterygium is in the progressive period</p></list-item></list></td>
                <td>Yes or no</td>
                <td>Images under natural light without fluorescein sodium eye drops</td>
              </tr>
              <tr valign="top">
                <td>Keratitis</td>
                <td><list list-type="order"><list-item><p>Turbidity degree of the cornea</p></list-item><list-item><p>Stage of keratitis</p></list-item><list-item><p>Corneal neovascularization</p></list-item><list-item><p>Edge of foci is clear</p></list-item><list-item><p>The condition of illness based on dyeing</p></list-item></list></td>
                <td><list list-type="order"><list-item><p>Pupil zone is invaded by turbidity or not</p></list-item><list-item><p>Infiltration stage and ulcer stage, perforation stage, or convalescence</p></list-item><list-item><p>Yes or no</p></list-item><list-item><p>No dyeing and dot staining, sheet dyeing, or dyeing with coloboma</p></list-item></list></td>
                <td>Images under cobalt blue light or natural light with fluorescein sodium eye drops [<xref ref-type="bibr" rid="ref8">8</xref>]</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
        <p>Machine learning, especially the deep learning technique represented by the convolutional neural network (CNN), is becoming an effective computer vision tool for automatically diagnosing diseases using biomedical images. It has been widely applied in medical image classification and the automatic diagnosis of disease, such as the diagnosis of attention deficit hyperactivity disorder with functional magnetic resonance imaging [<xref ref-type="bibr" rid="ref13">13</xref>]; gradation of brain tumor [<xref ref-type="bibr" rid="ref14">14</xref>], breast cancer [<xref ref-type="bibr" rid="ref15">15</xref>], and lung cancer [<xref ref-type="bibr" rid="ref16">16</xref>]; and diagnosis of skin disease [<xref ref-type="bibr" rid="ref17">17</xref>], kidney disease [<xref ref-type="bibr" rid="ref18">18</xref>], and ophthalmic diseases [<xref ref-type="bibr" rid="ref19">19</xref>-<xref ref-type="bibr" rid="ref23">23</xref>]. In this research, inception_v4 [<xref ref-type="bibr" rid="ref24">24</xref>] and residual network (Resnet) [<xref ref-type="bibr" rid="ref25">25</xref>] (101 layers) were used to carry out stage 1 and stages 3 and 4, respectively. While stage 1 (inception_v4) can give a general diagnostic conclusion, stages 3 (Resnet) and 4 (Resnet) can provide further information about diseases and treatment recommendations. In this research, a cost-sensitive CNN was adopted because imbalanced classification is common. Inception_v4 is a wider and deeper CNN that is suitable for careful classification (the difference between all classes is easily neglected sometimes). Resnet is a type of thin CNN, the architecture of which is full of cross-layer connections. The objective function is transformed to fit the residual function so that the performance of Resnet is improved considerably. In addition, Resnet is suitable for rough classification (the difference between all classes does not need to be carefully analyzed). 
In addition, we chose Resnet with 101 layers, whose capacity is adequate for the classification problems in this research. Stage 1 is a 5-class classification problem, with some classes being very similar in color and shape; thus, inception_v4 is chosen in stage 1. As other classification problems are limited to one specific disease, Resnet is selected in stages 3 and 4. Furthermore, the chain rule of derivatives based on the stochastic gradient descent algorithm [<xref ref-type="bibr" rid="ref26">26</xref>] was used to minimize the loss function.</p>
        
        <fig id="figure3" position="float">
          <label>Figure 3</label>
          <caption>
            <p>Architecture of the overall framework for interpretable diagnosis of multiple ocular diseases. AI: artificial intelligence.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        
        <p>Faster region-based convolutional neural network (Faster-RCNN), an effective and efficient approach, was adopted to localize the anatomical parts and foci (Stage 2). Faster-RCNN [<xref ref-type="bibr" rid="ref27">27</xref>] was developed on the basis of RCNN [<xref ref-type="bibr" rid="ref28">28</xref>] and Fast-RCNN [<xref ref-type="bibr" rid="ref29">29</xref>], which originally applied a superpixel segmentation algorithm to produce proposal regions, whereas Faster-RCNN uses an anchor mechanism to generate region proposals quickly and then adopts 2-stage training to obtain the transformations of the bounding box regressor and classifier. The first stage of Faster-RCNN is the region proposal network, which is responsible for generating region proposals. Then, whether the proposals are objects or not is judged, and the coordinates of each object are preliminarily regressed. The second stage judges the class of each object and finally regresses the coordinates of each object, in the same way as RCNN and Fast-RCNN. In this research, a pretrained ZF (Zeiler and Fergus [<xref ref-type="bibr" rid="ref4">4</xref>]) network was exploited to save training time.</p>
        
      </sec>
      <sec>
      <title>Experimental Settings</title>
      <p>This system was implemented with convolutional architecture for fast feature embedding [<xref ref-type="bibr" rid="ref30">30</xref>] (Berkeley Vision and Learning Center deep learning framework) and Tensorflow [<xref ref-type="bibr" rid="ref31">31</xref>]; all models were trained in parallel on four NVIDIA TITAN X GPUs. For the classification problem, indicators applied to evaluate the performance are as follows:</p>
      <graphic xlink:href="jmir_v20i11e11144_fig11.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      <disp-quote>
        <p><italic>Precision</italic><sub><italic>i</italic></sub> = <italic>TP</italic><sub><italic>i</italic></sub>/(<italic>TP</italic><sub><italic>i</italic></sub> + <italic>FP</italic><sub><italic>i</italic></sub>)</p>
      </disp-quote>
      <disp-quote>
        <p><italic>Sensitivity</italic><sub><italic>i</italic></sub> (TPR, Recall) = <italic>TP</italic><sub><italic>i</italic></sub>/(<italic>TP</italic><sub><italic>i</italic></sub> + <italic>FN</italic><sub><italic>i</italic></sub>)</p>
      </disp-quote>
      <disp-quote>
        <p><italic>FNR</italic><sub><italic>i</italic></sub> (false-negative rate) = <italic>FN</italic><sub><italic>i</italic></sub>/(<italic>TP</italic><sub><italic>i</italic></sub> + <italic>FN</italic><sub><italic>i</italic></sub>)</p>
      </disp-quote>
      <disp-quote>
        <p><italic>Specificity</italic><sub><italic>i</italic></sub> = <italic>TN</italic><sub><italic>i</italic></sub>/(<italic>TN</italic><sub><italic>i</italic></sub> + <italic>FP</italic><sub><italic>i</italic></sub>)</p>
      </disp-quote>
      <disp-quote>
        <p><italic>FPR</italic><sub><italic>i</italic></sub> (false-positive rate) = <italic>FP</italic><sub><italic>i</italic></sub>/(<italic>TN</italic><sub><italic>i</italic></sub> + <italic>FP</italic><sub><italic>i</italic></sub>)</p>
      </disp-quote>
      <p>where <italic>N</italic> is the total number of samples; <italic>P</italic><sub><italic>i</italic> </sub> indicates the number of correctly classified samples of <italic>i</italic> th class; <italic>k</italic> is the number of classes in specific classification problem;<italic>TP</italic><sub><italic>i</italic> </sub> denotes the number of samples that are correctly classified as <italic>i</italic> th class; <italic>FP</italic><sub><italic>i</italic> </sub> is the number of samples that are wrongly recognized as <italic>i</italic> th class; <italic>FN</italic><sub><italic>i</italic> </sub> denotes the number of samples that are classified as <italic>j</italic> th class, <italic>j</italic> ϵ [1,c]/<italic>i</italic>; <italic>TN</italic><sub><italic>i</italic> </sub> is the number of samples recognized as negative <italic>j</italic> th class, <italic>j</italic> ϵ [1,c]/<italic>i</italic>. All the above performance indicators can be computed with a confusion matrix. In addition, the receiver operating characteristics (ROC) curve, which indicates how many samples of <italic>i</italic> th class are recognized conditioned on a specific number of <italic>j</italic> th class (<italic>j</italic> ϵ [1,c]/<italic>i</italic>), are classified as <italic>i</italic> th class, PR (precision recall) curve, which illustrates how many samples of <italic>j</italic> th class are recognized as samples of <italic>i</italic> th class conditioned on a specific number of <italic>j</italic> th class (<italic>j</italic> ϵ [1,c]/<italic>i</italic>), are classified as <italic>i</italic> th class and area under the ROC curve (AUC), which means the area of the zone under the ROC curve was also adopted to assess the performance [<xref ref-type="bibr" rid="ref32">32</xref>]. The indicators (precision, sensitivity, specificity, ROC curve with AUC, and PR curve) were only used to evaluate the performance of binary classification problems. 
Furthermore, accuracy and confusion matrix were used to evaluate the performance of multiclass classification problems.</p>
      <p>For object localization problem, the interpolated average precision is always used to evaluate the performance [<xref ref-type="bibr" rid="ref33">33</xref>]. The interpolated average precision is computed with the PR curve using the equation presented below:</p>  
      <graphic xlink:href="jmir_v20i11e11144_fig10.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      <p>In the equation, p(η) is the measured precision at specific recall η. In this research, 4-fold cross-validation was used to evaluate the performance of this system firmly for all classification problems and localization problems. The application of the cost-sensitive CNN is dependent on the distribution of the dataset in specific classification problems. Except for the classification problems 1, 6, and 8, other classification problems in stages 3 and 4 were completed with the cost-sensitive CNN.</p></sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Performance of Stages 1 and 2</title>
        <p>All stages and the whole work flow of this system were completed with acceptable performance. The 4 stages in the framework were separately trained and validated, and all relevant results in stages 1 and 2 are shown in <xref ref-type="fig" rid="figure4">Figures 4</xref> and <xref ref-type="fig" rid="figure5">5</xref>. The rows and columns of all heat maps stand for ground truth labels and predicted labels, respectively. <xref ref-type="fig" rid="figure4">Figure 4</xref> shows the heat map of stage 1; the accuracy reaches 92%. <xref ref-type="fig" rid="figure5">Figure 5</xref> shows the detection performance of Faster-RCNN in recognizing anatomical parts and foci; the mean value of average precision over all classes surpasses 82% and 90% for images under natural light without fluorescein sodium eye drops, and images under cobalt blue light or natural light with fluorescein sodium eye drops, respectively. The left image in <xref ref-type="fig" rid="figure5">Figure 5</xref> is the performance for localizing objects in images without fluorescein sodium eye drops during stage 2, where I-XV represent the cornea and iris zone with keratitis, the focus of keratitis, the conjunctiva and sclera zone, the slit arc of the cornea, the slit arc of keratitis focus, the eyelid, the slit arc of the iris, the conjunctiva and sclera zone with hyperemia, the conjunctiva and sclera zone with edema, cornea and iris zone, pterygium, eyelash, pupil zone, the conjunctiva and sclera zone with hemorrhage, and the pupil zone with cataracts, respectively. The right image in <xref ref-type="fig" rid="figure5">Figure 5</xref> presents the performance for localizing the objects in images with fluorescein sodium eye drops during stage 2, where I-VII represent the cornea and iris zone with keratitis, the focus of keratitis, the slit arc of the cornea, the slit arc of keratitis focus, the slit arc of the iris, the eyelid, and the eyelash, respectively. 
The statistical results of stage 2 are shown in <xref ref-type="app" rid="app1">Multimedia Appendix 1</xref>.</p>
        <fig id="figure4" position="float">
          <label>Figure 4</label>
          <caption>
            <p>Performance of stage 1.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig5.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        
        <fig id="figure5" position="float">
          <label>Figure 5</label>
          <caption>
            <p>Performance of stage 2. AP: average precision.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig6.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        
      </sec>
      <sec>
        <title>Performance of Stages 3 and 4</title>
        <p>Stage 3 was decomposed into 10 classification problems, and the relevant results are shown in <xref ref-type="fig" rid="figure6">Figure 6</xref>, including the boxplots for the accuracy, specificity and sensitivity, ROC curve with the AUC, PR curve for all binary classification problems, and the heat maps with accuracy for all multiclass classification problems. <xref ref-type="fig" rid="figure6">Figure 6</xref> also shows the classification performance of stage 4, which includes boxplot for the accuracy, sensitivity and specificity, ROC curve with the AUC value and PR curve. The only classification problem addressed by stage 4 is whether a patient who suffers from pterygium needs surgery. In stage 2, the detection rate of some objects is low because Faster-RCNN cannot effectively detect some small objects. We will overcome this issue by adjusting the parameters of Faster-RCNN. In spite of this, stage 3 will not be affected by this drawback because the detection rate of the cornea and iris zone with keratitis and pterygium (the relevant anatomical parts and foci), which is involved with stage 3, is considerably high. In addition, the detection performance of the pupil zone, which is related to vision, is also satisfactory. In stage 3, the specificity of classification problems 1, 3, 4, and 5 is slightly low; however, the application scene of this system is hospitals where doctors pay more attention to sensitivity than specificity. The result of all classification problems is satisfactory and acceptable. Furthermore, the performance of classification problems 1, 3, 4, and 5 can be improved with more samples under the circumstance of Web-based learning. The statistical results of stages 3 and 4 are shown in <xref ref-type="app" rid="app1">Multimedia Appendix 1</xref>.</p>
        
      </sec>
      <sec>
        <title>Performance of Stage 3 and 4 with Original Images</title>
        <p>To study which anatomical parts are essential for automatic diagnosis, stages 3 and 4 were repeated with original medical images without processing; all parameters were the same as the original parameters used in stages 3 and 4. The relevant results are shown in <xref ref-type="fig" rid="figure7">Figure 7</xref>. The classification performance is close to that of the classification with anatomical parts and foci. In other words, the important parts, the cornea and iris zone with keratitis and pterygium, are essential for automatic diagnosis. The statistical results of stages 3 and 4 with original images are shown in <xref ref-type="app" rid="app1">Multimedia Appendix 1</xref>.</p>
        <fig id="figure6" position="float">
          <label>Figure 6</label>
          <caption>
            <p>Performance of stages 3 and 4. PR: precision recall; ROC: receiver operating characteristics.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig7.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure7" position="float">
          <label>Figure 7</label>
          <caption>
            <p>Performance of stages 3 and 4 with original images. PR: precision recall; ROC: receiver operating characteristics; AUC: area under the curve.</p>
          </caption>
          <graphic xlink:href="jmir_v20i11e11144_fig8.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Web-Based Automatic Diagnostic System</title>
        <p>We applied Django framework [<xref ref-type="bibr" rid="ref34">34</xref>] to develop a telemedical decision-making and automatic diagnosing system to facilitate doctors and patients; this system can analyze inputted medical images, show the diagnostic result as the working process of doctors, and provide treatment advice by producing an examination report. In addition, this telemedical system can finely analyze medical images and provide treatment advice with a diagnostic report (a PDF file) that includes treatment suggestion according to the analysis result and the consultation between a doctor and a patient. The format of the diagnostic report is shown in <xref ref-type="app" rid="app1">Multimedia Appendix 1</xref>. All diagnostic information can be shown to a doctor and a patient by storing into a database. Administrators and doctors can handle all information and contact patients conveniently. Furthermore, this system can be deployed in multiple hospitals and medical centers to screen common diseases and collect more medical data, which can be used to improve the diagnosis performance. The website is available in <xref ref-type="app" rid="app1">Multimedia Appendix 1</xref>.</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <p>In this study, we constructed an explainable artificial intelligence system for the automatic diagnosis of multiple ophthalmic diseases. This system carefully mimics the work flow of doctors so that reasons for a specific diagnosis can be explained to doctors and patients with high performance. Besides, this system accelerates the application of telemedicine with the assistance of computer networks and helps improve health levels and medical conditions. Moreover, this system can be easily expanded to cover more diseases as long as the diagnostic processes of other diseases are simulated seamlessly. In addition, this system can help medical students to understand diagnosis and diseases. In the future, considerable progress can be made in this field. In this research, we did not consider a multilabel classification for those patients with multiple diseases. In the future, multilabel classification can be adopted to make this system closer to real clinical circumstances. Moreover, because the bounding box is not suitable for some anatomical parts, semantic segmentation can be applied in this system for segmenting medical images more accurately.</p>
    </sec>
  </body>
  <back>
    <app-group>
      <app id="app1">
        <title>Multimedia Appendix 1</title>
        <p>Relevant material.</p>
        <media xlink:href="jmir_v20i11e11144_app1.pdf" xlink:title="PDF File, 42KB"/>
      </app>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AUC</term>
          <def>
            <p>area under receiver operating characteristics curve</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">CNN</term>
          <def>
            <p>convolutional neural network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">PR</term>
          <def>
            <p>precision recall</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">RCNN</term>
          <def>
            <p>region based convolutional neural network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">Resnet</term>
          <def>
            <p>residual network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">ROC</term>
          <def>
            <p>receiver operating characteristics</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This study was funded by the National Key Research and Development Program (2018YFC0116500); the National Science Foundation of China (#91546101, #61472311, #11401454, #61502371, and #81770967), National Defense Basic Research Project of China (jcky2016110c006), the Guangdong Provincial Natural Science Foundation (#YQ2015006, #2014A030306030, #2014TQ01R573, and #2013B020400003), the Natural Science Foundation of Guangzhou City (#2014J2200060), The Guangdong Provincial Natural Science Foundation for Distinguished Young Scholars of China (2014A030306030), the Science and Technology Planning Projects of Guangdong Province (2017B030314025), the Key Research Plan for the National Natural Science Foundation of China in Cultivation Project (#91546101), the Ministry of Science and Technology of China Grants (2015CB964600), and the Fundamental Research Funds for the Central Universities (#16ykjc28). We gratefully thank the volunteers of AINIST (medical artificial intelligence alliance of Zhongshan School of Medicine, Sun Yat-sen University).</p>
    </ack>
    <fn-group>
      <fn fn-type="con">
        <p>XL and HL designed the research; KZ conducted the study; WL, ZL, and XW collected the data and prepared the relevant information; KZ, FL, LH, LZ, LL, and SW were responsible for coding; LH and LZ developed the Web-based system; KZ analyzed and completed the experimental results; and KZ, WL, HL, and XL cowrote the manuscript. HL critically revised the manuscript.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Raccuglia</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Elbert</surname>
            <given-names>KC</given-names>
          </name>
          <name name-style="western">
            <surname>Adler</surname>
            <given-names>PDF</given-names>
          </name>
          <name name-style="western">
            <surname>Falk</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Wenny</surname>
            <given-names>MB</given-names>
          </name>
          <name name-style="western">
            <surname>Mollo</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Zeller</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Friedler</surname>
            <given-names>SA</given-names>
          </name>
          <name name-style="western">
            <surname>Schrier</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Norquist</surname>
            <given-names>AJ</given-names>
          </name>
        </person-group>
        <article-title>Machine-learning-assisted materials discovery using failed experiments</article-title>
        <source>Nature</source>  
        <year>2016</year>  
        <month>05</month>  
        <day>05</day>  
        <volume>533</volume>  
        <issue>7601</issue>  
        <fpage>73</fpage>  
        <lpage>6</lpage>  
        <pub-id pub-id-type="doi">10.1038/nature17439</pub-id>
        <pub-id pub-id-type="medline">27147027</pub-id>
        <pub-id pub-id-type="pii">nature17439</pub-id></nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hazlett</surname>
            <given-names>HC</given-names>
          </name>
          <name name-style="western">
            <surname>Gu</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Munsell</surname>
            <given-names>BC</given-names>
          </name>
          <name name-style="western">
            <surname>Kim</surname>
            <given-names>SH</given-names>
          </name>
          <name name-style="western">
            <surname>Styner</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Wolff</surname>
            <given-names>JJ</given-names>
          </name>
          <name name-style="western">
            <surname>Elison</surname>
            <given-names>JT</given-names>
          </name>
          <name name-style="western">
            <surname>Swanson</surname>
            <given-names>MR</given-names>
          </name>
          <name name-style="western">
            <surname>Zhu</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Botteron</surname>
            <given-names>KN</given-names>
          </name>
          <name name-style="western">
            <surname>Collins</surname>
            <given-names>DL</given-names>
          </name>
          <name name-style="western">
            <surname>Constantino</surname>
            <given-names>JN</given-names>
          </name>
          <name name-style="western">
            <surname>Dager</surname>
            <given-names>SR</given-names>
          </name>
          <name name-style="western">
            <surname>Estes</surname>
            <given-names>AM</given-names>
          </name>
          <name name-style="western">
            <surname>Evans</surname>
            <given-names>AC</given-names>
          </name>
          <name name-style="western">
            <surname>Fonov</surname>
            <given-names>VS</given-names>
          </name>
          <name name-style="western">
            <surname>Gerig</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Kostopoulos</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>McKinstry</surname>
            <given-names>RC</given-names>
          </name>
          <name name-style="western">
            <surname>Pandey</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Paterson</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Pruett</surname>
            <given-names>JR</given-names>
          </name>
          <name name-style="western">
            <surname>Schultz</surname>
            <given-names>RT</given-names>
          </name>
          <name name-style="western">
            <surname>Shaw</surname>
            <given-names>DW</given-names>
          </name>
          <name name-style="western">
            <surname>Zwaigenbaum</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Piven</surname>
            <given-names>J</given-names>
          </name>
          <collab>IBIS Network</collab>
          <collab>Clinical Sites</collab>
          <collab>Data Coordinating Center</collab>
          <collab>Image Processing Core</collab>
          <collab>Statistical Analysis</collab>
        </person-group>
        <article-title>Early brain development in infants at high risk for autism spectrum disorder</article-title>
        <source>Nature</source>  
        <year>2017</year>  
        <month>12</month>  
        <day>15</day>  
        <volume>542</volume>  
        <issue>7641</issue>  
        <fpage>348</fpage>  
        <lpage>351</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/28202961"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1038/nature21369</pub-id>
        <pub-id pub-id-type="medline">28202961</pub-id>
        <pub-id pub-id-type="pii">nature21369</pub-id>
        <pub-id pub-id-type="pmcid">PMC5336143</pub-id></nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Zhou</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Khosla</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Lapedriza</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Oliva</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Torralba</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Learning Deep Features for Discriminative Localization</article-title>
        <year>2016</year>  
        <month>6</month>  
        <day>26</day>  
        <conf-name>International Conference on Computer Vision and Pattern Recognition</conf-name>
        <conf-date>June 26th - July 1st, 2016</conf-date>
        <conf-loc>Las Vegas</conf-loc>
        <fpage>2015</fpage>  
        <lpage>9</lpage> </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Zeiler</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Fergus</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Visualizing and Understanding Convolutional Networks</article-title>
        <year>2014</year>  
        <month>9</month>  
        <day>6</day>  
        <conf-name>European Conference on Computer Vision</conf-name>
        <conf-date>September 6th-12th, 2014</conf-date>
        <conf-loc>Zurich</conf-loc></nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Perednia</surname>
            <given-names>DA</given-names>
          </name>
          <name name-style="western">
            <surname>Allen</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Telemedicine technology and clinical applications</article-title>
        <source>JAMA</source>  
        <year>1995</year>  
        <month>02</month>  
        <day>08</day>  
        <volume>273</volume>  
        <issue>6</issue>  
        <fpage>483</fpage>  
        <lpage>8</lpage>  
        <pub-id pub-id-type="medline">7837367</pub-id></nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Vibhu</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Zhu</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Fang</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Cheng</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Hong</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Shah</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>Impact of Predicting Health Care Utilization Via Web Search Behavior: A Data-Driven Analysis</article-title>
        <source>Journal of Medical Internet Research</source>  
        <year>2016</year>  
        <month>9</month>  
        <day>21</day>  
        <volume>18</volume>  
        <issue>9</issue>  
        <fpage>A</fpage> </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Long</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>Y</given-names>
          </name>
        </person-group>
        <article-title>Documenting rare disease data in China</article-title>
        <source>Science</source>  
        <year>2015</year>  
        <month>09</month>  
        <day>04</day>  
        <volume>349</volume>  
        <issue>6252</issue>  
        <fpage>1064</fpage>  
        <pub-id pub-id-type="doi">10.1126/science.349.6252.1064-b</pub-id>
        <pub-id pub-id-type="medline">26339020</pub-id>
        <pub-id pub-id-type="pii">349/6252/1064-b</pub-id></nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Amparo</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Yin</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Marmalidou</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Dana</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Evaluating Corneal Fluorescein Staining Using a Novel Automated Method</article-title>
        <source>Investigative Ophthalmology &#38; Visual Science</source>  
        <year>2017</year>  
        <month>7</month>  
        <day>15</day>  
        <volume>58</volume>  
        <issue>6</issue>  
        <fpage>BIO168</fpage>  
        <lpage>BIO173</lpage> </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Hirst</surname>
            <given-names>LW</given-names>
          </name>
        </person-group>
        <article-title>Treatment of pterygium</article-title>
        <source>Aust N Z J Ophthalmol</source>  
        <year>1998</year>  
        <month>11</month>  
        <volume>26</volume>  
        <issue>4</issue>  
        <fpage>269</fpage>  
        <lpage>70</lpage>  
        <pub-id pub-id-type="medline">9843252</pub-id></nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Prajna</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Krishnan</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Rajaraman</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Patel</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Shah</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Srinivasan</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Das</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ray</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Oldenburg</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>McLeod</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Zegans</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Acharya</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Lietman</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Rose-Nussbaumer</surname>
            <given-names>J</given-names>
          </name>
          <collab>Mycotic Ulcer Treatment Trial Group</collab>
        </person-group>
        <article-title>Predictors of Corneal Perforation or Need for Therapeutic Keratoplasty in Severe Fungal Keratitis: A Secondary Analysis of the Mycotic Ulcer Treatment Trial II</article-title>
        <source>JAMA Ophthalmol</source>  
        <year>2017</year>  
        <month>09</month>  
        <day>01</day>  
        <volume>135</volume>  
        <issue>9</issue>  
        <fpage>987</fpage>  
        <lpage>991</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/28817744"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1001/jamaophthalmol.2017.2914</pub-id>
        <pub-id pub-id-type="medline">28817744</pub-id>
        <pub-id pub-id-type="pii">2648266</pub-id>
        <pub-id pub-id-type="pmcid">PMC6038821</pub-id></nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Welch</surname>
            <given-names>JF</given-names>
          </name>
          <name name-style="western">
            <surname>Dickie</surname>
            <given-names>AK</given-names>
          </name>
        </person-group>
        <article-title>Red Alert: diagnosis and management of the acute red eye</article-title>
        <source>J R Nav Med Serv</source>  
        <year>2014</year>  
        <volume>100</volume>  
        <issue>1</issue>  
        <fpage>42</fpage>  
        <lpage>6</lpage>  
        <pub-id pub-id-type="medline">24881426</pub-id></nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Ouyang</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Zhu</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Huang</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Cao</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Li</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Signer</surname>
            <given-names>RAJ</given-names>
          </name>
          <name name-style="western">
            <surname>Xu</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Chung</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Patel</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Wu</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Cai</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Hou</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Wen</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Jafari</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Luo</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Zhu</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Qiu</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Hou</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Granet</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Heichel</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Shang</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Li</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Krawczyk</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Skowronska-Krawczyk</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Shi</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Zhong</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Zhong</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Morrison</surname>
            <given-names>SJ</given-names>
          </name>
          <name name-style="western">
            <surname>Maas</surname>
            <given-names>RL</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>Y</given-names>
          </name>
        </person-group>
        <article-title>Lens regeneration using endogenous stem cells with gain of visual function</article-title>
        <source>Nature</source>  
        <year>2016</year>  
        <month>03</month>  
        <day>17</day>  
        <volume>531</volume>  
        <issue>7594</issue>  
        <fpage>323</fpage>  
        <lpage>8</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/26958831"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1038/nature17181</pub-id>
        <pub-id pub-id-type="medline">26958831</pub-id>
        <pub-id pub-id-type="pii">nature17181</pub-id>
        <pub-id pub-id-type="pmcid">PMC6061995</pub-id></nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Riaz</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Asad</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Alonso</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Slabaugh</surname>
            <given-names>G</given-names>
          </name>
        </person-group>
        <article-title>Fusion of fMRI and non-imaging data for ADHD classification</article-title>
        <source>Comput Med Imaging Graph</source>  
        <year>2018</year>  
        <month>04</month>  
        <volume>65</volume>  
        <fpage>115</fpage>  
        <lpage>128</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.compmedimag.2017.10.002</pub-id>
        <pub-id pub-id-type="medline">29137838</pub-id>
        <pub-id pub-id-type="pii">S0895-6111(17)30098-8</pub-id></nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Mohan</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Subashini</surname>
            <given-names>MM</given-names>
          </name>
        </person-group>
        <article-title>MRI based medical image analysis: survey on brain tumor grade classification</article-title>
        <source>Biomedical Signal Processing &#38; Control</source>  
        <year>2018</year>  
        <volume>39</volume>  
        <issue>1</issue>  
        <fpage>139</fpage>  
        <lpage>61</lpage> </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Golden</surname>
            <given-names>JA</given-names>
          </name>
        </person-group>
        <article-title>Deep Learning Algorithms for Detection of Lymph Node Metastases From Breast Cancer: Helping Artificial Intelligence Be Seen</article-title>
        <source>JAMA</source>  
        <year>2017</year>  
        <month>12</month>  
        <day>12</day>  
        <volume>318</volume>  
        <issue>22</issue>  
        <fpage>2184</fpage>  
        <lpage>2186</lpage>  
        <pub-id pub-id-type="doi">10.1001/jama.2017.14580</pub-id>
        <pub-id pub-id-type="medline">29234791</pub-id>
        <pub-id pub-id-type="pii">2665757</pub-id></nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Berry</surname>
            <given-names>GJ</given-names>
          </name>
          <name name-style="western">
            <surname>Altman</surname>
            <given-names>RB</given-names>
          </name>
          <name name-style="western">
            <surname>Ré</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Rubin</surname>
            <given-names>DL</given-names>
          </name>
          <name name-style="western">
            <surname>Snyder</surname>
            <given-names>M</given-names>
          </name>
        </person-group>
        <article-title>Predicting non-small cell lung cancer prognosis by fully automated microscopic pathology image features</article-title>
        <source>Nat Commun</source>  
        <year>2016</year>  
        <month>12</month>  
        <day>16</day>  
        <volume>7</volume>  
        <fpage>12474</fpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://www.nature.com/articles/ncomms12474#supplementary-information"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1038/ncomms12474</pub-id>
        <pub-id pub-id-type="medline">27527408</pub-id>
        <pub-id pub-id-type="pii">ncomms12474</pub-id>
        <pub-id pub-id-type="pmcid">PMC4990706</pub-id></nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Esteva</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Kuprel</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Novoa</surname>
            <given-names>RA</given-names>
          </name>
          <name name-style="western">
            <surname>Ko</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Swetter</surname>
            <given-names>SM</given-names>
          </name>
          <name name-style="western">
            <surname>Blau</surname>
            <given-names>HM</given-names>
          </name>
          <name name-style="western">
            <surname>Thrun</surname>
            <given-names>S</given-names>
          </name>
        </person-group>
        <article-title>Dermatologist-level classification of skin cancer with deep neural networks</article-title>
        <source>Nature</source>  
        <year>2017</year>  
        <month>12</month>  
        <day>02</day>  
        <volume>542</volume>  
        <issue>7639</issue>  
        <fpage>115</fpage>  
        <lpage>118</lpage>  
        <pub-id pub-id-type="doi">10.1038/nature21056</pub-id>
        <pub-id pub-id-type="medline">28117445</pub-id>
        <pub-id pub-id-type="pii">nature21056</pub-id></nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Kolachalama</surname>
            <given-names>VB</given-names>
          </name>
          <name name-style="western">
            <surname>Singh</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>CQ</given-names>
          </name>
          <name name-style="western">
            <surname>Mun</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Belghasem</surname>
            <given-names>ME</given-names>
          </name>
          <name name-style="western">
            <surname>Henderson</surname>
            <given-names>JM</given-names>
          </name>
          <name name-style="western">
            <surname>Francis</surname>
            <given-names>JM</given-names>
          </name>
          <name name-style="western">
            <surname>Salant</surname>
            <given-names>DJ</given-names>
          </name>
          <name name-style="western">
            <surname>Chitalia</surname>
            <given-names>VC</given-names>
          </name>
        </person-group>
        <article-title>Association of Pathological Fibrosis With Renal Survival Using Deep Neural Networks</article-title>
        <source>Kidney Int Rep</source>  
        <year>2018</year>  
        <month>03</month>  
        <volume>3</volume>  
        <issue>2</issue>  
        <fpage>464</fpage>  
        <lpage>475</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2468-0249(17)30437-0"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1016/j.ekir.2017.11.002</pub-id>
        <pub-id pub-id-type="medline">29725651</pub-id>
        <pub-id pub-id-type="pii">S2468-0249(17)30437-0</pub-id>
        <pub-id pub-id-type="pmcid">PMC5932308</pub-id></nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Long</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Wu</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Jiang</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>An</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Lin</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Li</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Li</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Cao</surname>
            <given-names>Q</given-names>
          </name>
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>Y</given-names>
          </name>
        </person-group>
        <article-title>An artificial intelligence platform for the multihospital collaborative management of congenital cataracts</article-title>
        <source>Nature Biomedical Engineering</source>  
        <year>2017</year>  
        <month>1</month>  
        <day>30</day>  
        <volume>1</volume>  
        <issue>2</issue>  
        <fpage>0024</fpage> </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gargeya</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Leng</surname>
            <given-names>T</given-names>
          </name>
        </person-group>
        <article-title>Automated Identification of Diabetic Retinopathy Using Deep Learning</article-title>
        <source>Ophthalmology</source>  
        <year>2017</year>  
        <month>12</month>  
        <volume>124</volume>  
        <issue>7</issue>  
        <fpage>962</fpage>  
        <lpage>969</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.ophtha.2017.02.008</pub-id>
        <pub-id pub-id-type="medline">28359545</pub-id>
        <pub-id pub-id-type="pii">S0161-6420(16)31774-2</pub-id></nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Gulshan</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Peng</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Coram</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Stumpe</surname>
            <given-names>MC</given-names>
          </name>
          <name name-style="western">
            <surname>Wu</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Narayanaswamy</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Venugopalan</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Widner</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Madams</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Cuadros</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Kim</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Raman</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Nelson</surname>
            <given-names>PC</given-names>
          </name>
          <name name-style="western">
            <surname>Mega</surname>
            <given-names>JL</given-names>
          </name>
          <name name-style="western">
            <surname>Webster</surname>
            <given-names>DR</given-names>
          </name>
        </person-group>
        <article-title>Development and Validation of a Deep Learning Algorithm for Detection of Diabetic Retinopathy in Retinal Fundus Photographs</article-title>
        <source>JAMA</source>  
        <year>2016</year>  
        <month>12</month>  
        <day>13</day>  
        <volume>316</volume>  
        <issue>22</issue>  
        <fpage>2402</fpage>  
        <lpage>2410</lpage>  
        <pub-id pub-id-type="doi">10.1001/jama.2016.17216</pub-id>
        <pub-id pub-id-type="medline">27898976</pub-id>
        <pub-id pub-id-type="pii">2588763</pub-id></nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Giancardo</surname>
            <given-names>Luca</given-names>
          </name>
          <name name-style="western">
            <surname>Meriaudeau</surname>
            <given-names>Fabrice</given-names>
          </name>
          <name name-style="western">
            <surname>Karnowski</surname>
            <given-names>Thomas P</given-names>
          </name>
          <name name-style="western">
            <surname>Li</surname>
            <given-names>Yaqin</given-names>
          </name>
          <name name-style="western">
            <surname>Garg</surname>
            <given-names>Seema</given-names>
          </name>
          <name name-style="western">
            <surname>Tobin</surname>
            <given-names>Kenneth W</given-names>
          </name>
          <name name-style="western">
            <surname>Chaum</surname>
            <given-names>Edward</given-names>
          </name>
        </person-group>
        <article-title>Exudate-based diabetic macular edema detection in fundus images using publicly available datasets</article-title>
        <source>Med Image Anal</source>  
        <year>2012</year>  
        <month>01</month>  
        <volume>16</volume>  
        <issue>1</issue>  
        <fpage>216</fpage>  
        <lpage>26</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.media.2011.07.004</pub-id>
        <pub-id pub-id-type="medline">21865074</pub-id>
        <pub-id pub-id-type="pii">S1361-8415(11)00101-0</pub-id></nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ting</surname>
            <given-names>DSW</given-names>
          </name>
          <name name-style="western">
            <surname>Cheung</surname>
            <given-names>CY</given-names>
          </name>
          <name name-style="western">
            <surname>Lim</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Tan</surname>
            <given-names>GSW</given-names>
          </name>
          <name name-style="western">
            <surname>Quang</surname>
            <given-names>ND</given-names>
          </name>
          <name name-style="western">
            <surname>Gan</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Hamzah</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Garcia-Franco</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>San</surname>
            <given-names>YIY</given-names>
          </name>
          <name name-style="western">
            <surname>Lee</surname>
            <given-names>SY</given-names>
          </name>
          <name name-style="western">
            <surname>Wong</surname>
            <given-names>EYM</given-names>
          </name>
          <name name-style="western">
            <surname>Sabanayagam</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Baskaran</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ibrahim</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Tan</surname>
            <given-names>NC</given-names>
          </name>
          <name name-style="western">
            <surname>Finkelstein</surname>
            <given-names>EA</given-names>
          </name>
          <name name-style="western">
            <surname>Lamoureux</surname>
            <given-names>EL</given-names>
          </name>
          <name name-style="western">
            <surname>Wong</surname>
            <given-names>IY</given-names>
          </name>
          <name name-style="western">
            <surname>Bressler</surname>
            <given-names>NM</given-names>
          </name>
          <name name-style="western">
            <surname>Sivaprasad</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Varma</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Jonas</surname>
            <given-names>JB</given-names>
          </name>
          <name name-style="western">
            <surname>He</surname>
            <given-names>MG</given-names>
          </name>
          <name name-style="western">
            <surname>Cheng</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Cheung</surname>
            <given-names>GCM</given-names>
          </name>
          <name name-style="western">
            <surname>Aung</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Hsu</surname>
            <given-names>W</given-names>
          </name>
          <name name-style="western">
            <surname>Lee</surname>
            <given-names>ML</given-names>
          </name>
          <name name-style="western">
            <surname>Wong</surname>
            <given-names>TY</given-names>
          </name>
        </person-group>
        <article-title>Development and Validation of a Deep Learning System for Diabetic Retinopathy and Related Eye Diseases Using Retinal Images From Multiethnic Populations With Diabetes</article-title>
        <source>JAMA</source>  
        <year>2017</year>  
        <month>12</month>  
        <day>12</day>  
        <volume>318</volume>  
        <issue>22</issue>  
        <fpage>2211</fpage>  
        <lpage>2223</lpage>  
        <comment>
          <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/29234807"/>
        </comment>  
        <pub-id pub-id-type="doi">10.1001/jama.2017.18152</pub-id>
        <pub-id pub-id-type="medline">29234807</pub-id>
        <pub-id pub-id-type="pii">2665775</pub-id>
        <pub-id pub-id-type="pmcid">PMC5820739</pub-id></nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Szegedy</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Ioffe</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Vanhoucke</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Alemi</surname>
            <given-names>A</given-names>
          </name>
        </person-group>
        <article-title>Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning</article-title>
        <year>2017</year>  
        <month>2</month>  
        <day>4</day>  
        <conf-name>AAAI Conference on Artificial Intelligence</conf-name>
        <conf-date>February 4th - 9th, 2017</conf-date>
        <conf-loc>San Francisco, USA</conf-loc></nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>He</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>X</given-names>
          </name>
          <name name-style="western">
            <surname>Ren</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Sun</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Deep Residual Learning for Image Recognition</article-title>
        <year>2016</year>  
        <month>6</month>  
        <day>26</day>  
        <conf-name>International Conference on Computer Vision and Pattern Recognition</conf-name>
        <conf-date>June 26th - July 1st, 2016</conf-date>
        <conf-loc>Las Vegas, USA</conf-loc></nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ketkar</surname>
            <given-names>N</given-names>
          </name>
        </person-group>
        <article-title>Parallelized stochastic gradient descent</article-title>
        <year>2010</year>  
        <conf-name>Advances in neural information processing systems</conf-name>
        <conf-date>December 6th - 11th, 2010</conf-date>
        <conf-loc>Vancouver, Canada</conf-loc></nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Ren</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>He</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Girshick</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Sun</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Faster R-CNN: towards real-time object detection with region proposal networks</article-title>
        <year>2015</year>  
        <conf-name>Advances in Neural Information Processing Systems</conf-name>
        <conf-date>December 7th -12th, 2015</conf-date>
        <conf-loc>Montreal, Canada</conf-loc>
        <publisher-name>International Conference on Neural Information Processing Systems</publisher-name></nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Girshick</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Donahue</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Darrell</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Malik</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>Region-Based Convolutional Networks for Accurate Object Detection and Segmentation</article-title>
        <source>IEEE Trans Pattern Anal Mach Intell</source>  
        <year>2016</year>  
        <month>01</month>  
        <volume>38</volume>  
        <issue>1</issue>  
        <fpage>142</fpage>  
        <lpage>58</lpage>  
        <pub-id pub-id-type="doi">10.1109/TPAMI.2015.2437384</pub-id>
        <pub-id pub-id-type="medline">26656583</pub-id></nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Girshick</surname>
            <given-names>R</given-names>
          </name>
        </person-group>
        <article-title>Fast R-CNN</article-title>
        <year>2015</year>  
        <month>12</month>  
        <day>13</day>  
        <conf-name>IEEE International Conference on Computer Vision</conf-name>
        <conf-date>December 13th - 16th, 2015</conf-date>
        <conf-loc>Santiago, Chile</conf-loc></nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="confproc">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Jia</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Shelhamer</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Donahue</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Karayev</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Long</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Girshick</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Sergio</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Trevor</surname>
            <given-names>D</given-names>
          </name>
        </person-group>
        <article-title>Caffe: Convolutional Architecture for Fast Feature Embedding</article-title>
        <source>Caffe: Convolutional Architecture for Fast Feature Embedding</source>  
        <year>2014</year>  
        <month>11</month>  
        <day>3</day>  
        <conf-name>Acm International Conference on Multimedia</conf-name>
        <conf-date>November 3rd - 7th, 2014</conf-date>
        <conf-loc>Orlando, Florida, USA</conf-loc>
        <publisher-loc>Caffe</publisher-loc>
        <publisher-name>Acm International Conference on Multimedia</publisher-name></nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="web">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Abadi</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Agarwal</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Barham</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Brevdo</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Chen</surname>
            <given-names>Z</given-names>
          </name>
          <name name-style="western">
            <surname>Citro</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Corrado</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Davis</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Dean</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Devin</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Ghemawat</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Goodfellow</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Harp</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Irving</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Isard</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Jia</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Jozefowicz</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Kaiser</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Kudlur</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Levenberg</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Mane</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Monga</surname>
            <given-names>R</given-names>
          </name>
          <name name-style="western">
            <surname>Moore</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Murray</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Olah</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Schuster</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Shlens</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Steiner</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Sutskever</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Talwar</surname>
            <given-names>K</given-names>
          </name>
          <name name-style="western">
            <surname>Tucker</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Vanhoucke</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Vasudevan</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Viegas</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Vinyals</surname>
            <given-names>O</given-names>
          </name>
          <name name-style="western">
            <surname>Warden</surname>
            <given-names>P</given-names>
          </name>
          <name name-style="western">
            <surname>Wattenberg</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Wicke</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Yu</surname>
            <given-names>Y</given-names>
          </name>
          <name name-style="western">
            <surname>Zheng</surname>
            <given-names>X</given-names>
          </name>
        </person-group>
        <source>eprint arXiv</source>  
        <year>2016</year>  
        <access-date>2018-09-24</access-date>
        <comment>TensorFlow: Large-Scale Machine Learning on Heterogeneous Distributed Systems 
        <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:type="simple" xlink:href="https://arxiv.org/abs/1603.04467">https://arxiv.org/abs/1603.04467</ext-link>
        <ext-link ext-link-type="webcite" xlink:href="72fPxz3sU"/></comment> </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Shi</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Pun</surname>
            <given-names>CM</given-names>
          </name>
        </person-group>
        <article-title>Superpixel-based 3D deep neural networks for hyperspectral image classification</article-title>
        <source>Pattern Recognition</source>  
        <year>2018</year>  
        <month>02</month>  
        <volume>74</volume>  
        <fpage>600</fpage>  
        <lpage>616</lpage>  
        <pub-id pub-id-type="doi">10.1016/j.patcog.2017.09.007</pub-id></nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Everingham</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Zisserman</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Williams</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Gool</surname>
            <given-names>L</given-names>
          </name>
          <name name-style="western">
            <surname>Allan</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Bishop</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Chapelle</surname>
            <given-names>O</given-names>
          </name>
          <name name-style="western">
            <surname>Dalal</surname>
            <given-names>N</given-names>
          </name>
          <name name-style="western">
            <surname>Deselaers</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Dorkó</surname>
            <given-names>G</given-names>
          </name>
          <name name-style="western">
            <surname>Duffner</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Eichhorn</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Farquhar</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Fritz</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Garcia</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Griffiths</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Jurie</surname>
            <given-names>F</given-names>
          </name>
          <name name-style="western">
            <surname>Keysers</surname>
            <given-names>T</given-names>
          </name>
          <name name-style="western">
            <surname>Koskela</surname>
            <given-names>M</given-names>
          </name>
          <name name-style="western">
            <surname>Laaksonen</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Larlus</surname>
            <given-names>D</given-names>
          </name>
          <name name-style="western">
            <surname>Leibe</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Meng</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Ney</surname>
            <given-names>H</given-names>
          </name>
          <name name-style="western">
            <surname>Schiele</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Schmid</surname>
            <given-names>C</given-names>
          </name>
          <name name-style="western">
            <surname>Seemann</surname>
            <given-names>E</given-names>
          </name>
          <name name-style="western">
            <surname>Shawe-Taylor</surname>
            <given-names>J</given-names>
          </name>
          <name name-style="western">
            <surname>Storkey</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Szedmak</surname>
            <given-names>S</given-names>
          </name>
          <name name-style="western">
            <surname>Triggs</surname>
            <given-names>B</given-names>
          </name>
          <name name-style="western">
            <surname>Ulusoy</surname>
            <given-names>I</given-names>
          </name>
          <name name-style="western">
            <surname>Viitaniemi</surname>
            <given-names>V</given-names>
          </name>
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>The 2005 PASCAL Visual Object Classes Challenge</article-title>
        <source>The 2005 PASCAL Visual Object Classes Challenge</source>  
        <year>2006</year>  
        <publisher-loc>Berlin, Heidelberg</publisher-loc>
        <publisher-name>Springer</publisher-name>
        <fpage>117</fpage>  
        <lpage>176</lpage> </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="book">
        <person-group person-group-type="author">
          <name name-style="western">
            <surname>Holovaty</surname>
            <given-names>A</given-names>
          </name>
          <name name-style="western">
            <surname>Kaplan-Moss</surname>
            <given-names>J</given-names>
          </name>
        </person-group>
        <article-title>The Definitive Guide to Django: Web development done right</article-title>
        <source>The Definitive Guide to Django: Web development done right</source>  
        <year>2009</year>  
        <publisher-loc>California, USA</publisher-loc>
        <publisher-name>Apress</publisher-name></nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>