<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR</journal-id>
      <journal-id journal-id-type="nlm-ta">J Med Internet Res</journal-id>
      <journal-title>Journal of Medical Internet Research</journal-title>
      <issn pub-type="epub">1438-8871</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v25i1e48142</article-id>
      <article-id pub-id-type="pmid">38019564</article-id>
      <article-id pub-id-type="doi">10.2196/48142</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Developing and Evaluating an AI-Based Computer-Aided Diagnosis System for Retinal Disease: Diagnostic Study for Central Serous Chorioretinopathy</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Eysenbach</surname>
            <given-names>Gunther</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Zheng</surname>
            <given-names>Yuanzhang</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Yang</surname>
            <given-names>Weihua</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Wei</surname>
            <given-names>Rong</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Li</surname>
            <given-names>Zhongqiang</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>Yoon</surname>
            <given-names>Jeewoo</given-names>
          </name>
          <degrees>DPhil</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-9067-8653</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>Han</surname>
            <given-names>Jinyoung</given-names>
          </name>
          <degrees>DPhil</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-8911-2791</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Ko</surname>
            <given-names>Junseo</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-4458-3987</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Choi</surname>
            <given-names>Seong</given-names>
          </name>
          <degrees>MSc</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-2721-4706</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author">
          <name name-style="western">
            <surname>Park</surname>
            <given-names>Ji In</given-names>
          </name>
          <degrees>MD, DPhil</degrees>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-4662-3759</ext-link>
        </contrib>
        <contrib id="contrib6" contrib-type="author">
          <name name-style="western">
            <surname>Hwang</surname>
            <given-names>Joon Seo</given-names>
          </name>
          <degrees>MD</degrees>
          <xref rid="aff5" ref-type="aff">5</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-1175-7693</ext-link>
        </contrib>
        <contrib id="contrib7" contrib-type="author">
          <name name-style="western">
            <surname>Han</surname>
            <given-names>Jeong Mo</given-names>
          </name>
          <degrees>MD</degrees>
          <xref rid="aff6" ref-type="aff">6</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-6379-4536</ext-link>
        </contrib>
        <contrib id="contrib8" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Hwang</surname>
            <given-names>Daniel Duck-Jin</given-names>
          </name>
          <degrees>MD, DPhil</degrees>
          <xref rid="aff7" ref-type="aff">7</xref>
          <address>
            <institution>Department of Ophthalmology</institution>
            <institution>Hangil Eye Hospital</institution>
            <addr-line>35 Bupyeong-daero, Bupyeong-gu, Incheon</addr-line>
            <addr-line>Incheon, 21388</addr-line>
            <country>Republic of Korea</country>
            <phone>82 327175808</phone>
            <email>daniel.dj.hwang@gmail.com</email>
          </address>
          <xref rid="aff8" ref-type="aff">8</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-1808-3169</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Applied Artificial Intelligence</institution>
        <institution>Sungkyunkwan University</institution>
        <addr-line>Seoul</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Raondata</institution>
        <addr-line>Seoul</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Human-Artificial Intelligence Interaction</institution>
        <institution>Sungkyunkwan University</institution>
        <addr-line>Seoul</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>Department of Medicine</institution>
        <institution>Kangwon National University School of Medicine</institution>
        <institution>Kangwon National University Hospital</institution>
        <addr-line>Chuncheon</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff5">
        <label>5</label>
        <institution>Seoul Plus Eye Clinic</institution>
        <addr-line>Seoul</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff6">
        <label>6</label>
        <institution>Seoul Bombit Eye Clinic</institution>
        <addr-line>Sejong</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff7">
        <label>7</label>
        <institution>Department of Ophthalmology</institution>
        <institution>Hangil Eye Hospital</institution>
        <addr-line>Incheon</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <aff id="aff8">
        <label>8</label>
        <institution>Lux Mind</institution>
        <addr-line>Incheon</addr-line>
        <country>Republic of Korea</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Daniel Duck-Jin Hwang <email>daniel.dj.hwang@gmail.com</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2023</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>29</day>
        <month>11</month>
        <year>2023</year>
      </pub-date>
      <volume>25</volume>
      <elocation-id>e48142</elocation-id>
      <history>
        <date date-type="received">
          <day>14</day>
          <month>4</month>
          <year>2023</year>
        </date>
        <date date-type="rev-request">
          <day>25</day>
          <month>7</month>
          <year>2023</year>
        </date>
        <date date-type="rev-recd">
          <day>29</day>
          <month>10</month>
          <year>2023</year>
        </date>
        <date date-type="accepted">
          <day>5</day>
          <month>11</month>
          <year>2023</year>
        </date>
      </history>
      <copyright-statement>©Jeewoo Yoon, Jinyoung Han, Junseo Ko, Seong Choi, Ji In Park, Joon Seo Hwang, Jeong Mo Han, Daniel Duck-Jin Hwang. Originally published in the Journal of Medical Internet Research (https://www.jmir.org), 29.11.2023.</copyright-statement>
      <copyright-year>2023</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in the Journal of Medical Internet Research, is properly cited. The complete bibliographic information, a link to the original publication on https://www.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://www.jmir.org/2023/1/e48142" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Although previous research has made substantial progress in developing high-performance artificial intelligence (AI)–based computer-aided diagnosis (AI-CAD) systems in various medical domains, little attention has been paid to developing and evaluating AI-CAD system in ophthalmology, particularly for diagnosing retinal diseases using optical coherence tomography (OCT) images.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>This diagnostic study aimed to determine the usefulness of a proposed AI-CAD system in assisting ophthalmologists with the diagnosis of central serous chorioretinopathy (CSC), which is known to be difficult to diagnose, using OCT images.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>For the training and evaluation of the proposed deep learning model, 1693 OCT images were collected and annotated. The data set included 929 and 764 cases of acute and chronic CSC, respectively. In total, 66 ophthalmologists (2 groups: 36 retina and 30 nonretina specialists) participated in the observer performance test. To evaluate the deep learning algorithm used in the proposed AI-CAD system, the training, validation, and test sets were split in an 8:1:1 ratio. Further, 100 randomly sampled OCT images from the test set were used for the observer performance test, and the participants were instructed to select a CSC subtype for each of these images. Each image was provided under different conditions: (1) without AI assistance, (2) with AI assistance with a probability score, and (3) with AI assistance with a probability score and visual evidence heatmap. The sensitivity, specificity, and area under the receiver operating characteristic curve were used to measure the diagnostic performance of the model and ophthalmologists.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>The proposed system achieved a high detection performance (99% of the area under the curve) for CSC, outperforming the 66 ophthalmologists who participated in the observer performance test. In both groups, ophthalmologists with the support of AI assistance with a probability score and visual evidence heatmap achieved the highest mean diagnostic performance compared with that of those subjected to other conditions (without AI assistance or with AI assistance with a probability score). Nonretina specialists achieved expert-level diagnostic performance with the support of the proposed AI-CAD system.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>Our proposed AI-CAD system improved the diagnosis of CSC by ophthalmologists, which may support decision-making regarding retinal disease detection and alleviate the workload of ophthalmologists.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>computer aided diagnosis</kwd>
        <kwd>ophthalmology</kwd>
        <kwd>deep learning</kwd>
        <kwd>artificial intelligence</kwd>
        <kwd>computer vision</kwd>
        <kwd>imaging informatics</kwd>
        <kwd>retinal disease</kwd>
        <kwd>central serous chorioretinopathy</kwd>
        <kwd>diagnostic study</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Computer-aided diagnosis (CAD) is a software system that assists in the diagnostic decision-making of clinicians [<xref ref-type="bibr" rid="ref1">1</xref>]. CAD systems can be used to support clinicians in various tasks, such as detecting breast cancer [<xref ref-type="bibr" rid="ref2">2</xref>], lung cancer [<xref ref-type="bibr" rid="ref3">3</xref>], colorectal cancer [<xref ref-type="bibr" rid="ref4">4</xref>], and even Alzheimer disease [<xref ref-type="bibr" rid="ref5">5</xref>]. Thus, these systems potentially alleviate the heavy workload of clinicians, resulting in the improved quality of clinical services [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref7">7</xref>].</p>
      <p>With recent advancements in computer vision and deep learning techniques, deep neural networks have been reported to achieve expert-level performance in clinical diagnoses [<xref ref-type="bibr" rid="ref8">8</xref>-<xref ref-type="bibr" rid="ref10">10</xref>]. This, in turn, has led researchers to construct CAD systems involving artificial intelligence (AI) models, such as AI-based computer-aided diagnosis (AI-CAD), to assist with clinical diagnosis, for example, by detecting major thoracic diseases on chest radiographs [<xref ref-type="bibr" rid="ref6">6</xref>] and classifying skin cancer using skin photographs [<xref ref-type="bibr" rid="ref11">11</xref>]. Although prior studies have made valuable progress in developing high-performance AI-CAD systems in various medical domains, minimal attention has been focused on developing and evaluating AI-CAD systems in ophthalmology, especially for the diagnosis of retinal diseases using optical coherence tomography (OCT) images.</p>
      <p>Following age-related macular degeneration (AMD), diabetic retinopathy, and branch retinal vein occlusion, central serous chorioretinopathy (CSC) is the fourth most prevalent vision-threatening retinopathy and is characterized by serous detachment of the neurosensory retina at the posterior pole [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref12">12</xref>]. Most patients with CSC are male, and they experience decreased or distorted vision with altered color sensitivity and persistent subretinal fluid (SRF) damage to the retinal outer layer, resulting in permanent vision loss, which degrades their quality of life [<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref14">14</xref>]. When diagnosing CSC, assessing the chronicity of the disease is difficult but critical for the formulation of a treatment strategy or the prediction of its prognosis [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref15">15</xref>]. A patient with chronic CSC with or without sustained sensory retinal detachment may already have irreversible poor vision or require active intervention; hence, preventing permanent visual disturbance that can reduce a patient’s quality of life [<xref ref-type="bibr" rid="ref15">15</xref>] is important.</p>
      <p>In ophthalmology, OCT is a noninvasive, rapid, and accurate test that produces highly reproducible outcomes [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>]. It is frequently used to evaluate structural abnormalities associated with retinal disease, including CSC, without requiring physical contact [<xref ref-type="bibr" rid="ref17">17</xref>]. It is now considered the imaging modality of choice for the diagnosis and follow-up of patients with CSC [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref19">19</xref>]. OCT has been used to examine the alterations in CSC's retinal pigment epithelium (RPE) and outer retina morphology [<xref ref-type="bibr" rid="ref20">20</xref>]. Further, OCT can assess and quantify the presence of SRF, which can aid in estimating the episode duration and determine the subsequent treatment [<xref ref-type="bibr" rid="ref17">17</xref>].</p>
      <p>Herein, we propose an AI-CAD system that can alleviate the heavy workloads and improve the diagnostic performance of retinal disease for ophthalmologists. We tried to find out whether AI could really help ophthalmologists’ diagnostic activities through a CAD system in the field of ophthalmology, and we selected CSC, one of the representative macular diseases, and built a CAD system. In particular, the proposed AI-CAD system may support ophthalmologists in distinguishing the subtypes of CSC. To investigate the effectiveness of the proposed system, we conducted a within-subject user study involving 66 ophthalmologists.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Ethical Considerations</title>
        <p>This study was conducted in accordance with the 1964 Declaration of Helsinki guidelines. The Ethics Committee of Hangil Eye Hospital approved the research protocol (IRB 21018) and its implementation and waived the requirement for informed consent as this study was retrospective and observational in nature and used medical records to extract the required data.</p>
      </sec>
      <sec>
        <title>AI-CAD System Construction for CSC-Subtype Detection</title>
        <sec>
          <title>Data Collection and CSC Labeling</title>
          <p>To train and evaluate the proposed deep learning model, 1693 OCT images of patients who visited Hangil Eye Hospital between June 2017 and June 2021 were collected and annotated. This study aimed to construct an AI-CAD system that identifies CSC subtypes.</p>
          <p>All CSC cases were diagnosed by independent retinal specialists using fundus examinations, fluorescein angiography (FA), indocyanine green angiography (ICGA), and OCT images. On all CSC cases, FA and ICGA were performed simultaneously using a Heidelberg Retina Angiograph (Heidelberg Engineering) confocal scanning laser ophthalmoscope. Other potentially conflicting retinal pathologies such as AMD, polypoidal choroidal vasculopathy, pachychoroid neovasculopathy, and pachychoroid pigment epitheliopathy were excluded from our analysis.</p>
          <p>Acute CSC was diagnosed based on the presence of serous retinal detachment involving the macula, as demonstrated by OCT, and the leakage at the level of the RPE on FA [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref21">21</xref>]. In the acute CSC cohort, only classic, acute CSC with a symptom duration of less than 4 months since the first episode was included. Chronic CSC was diagnosed based on the RPE status and was defined as chronic chorioretinopathy with widespread RPE decompensation, with or without subretinal detachment, and with or without an active leakage site, according to the Daruich et al [<xref ref-type="bibr" rid="ref22">22</xref>] classification scheme [<xref ref-type="bibr" rid="ref21">21</xref>]. Chronic CSC was diagnosed when extensive RPE atrophy was observed, independently of SRF, according to their definition [<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref22">22</xref>]. Further, 2 retina experts (JSH and DDJH) reviewed the images from OCT, FA, and ICGA imaging techniques and also assessed the medical records. If there was a difference in opinions, another retina expert (JMH) stepped in to identify the inconsistency and consulted with the others. Any differences were settled through mutual agreement. Representative CSC cases are illustrated in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>.</p>
        </sec>
        <sec>
          <title>User Interface of AI-CAD System</title>
          <p>The proposed AI-CAD system formulated in this study (<xref rid="figure1" ref-type="fig">Figure 1</xref>) comprises three components: (1) an AI probability panel, (2) an evidence heatmap panel, and (3) a status panel. Further, we designed the user interface of the proposed AI-CAD system using HTML, CSS, and JavaScript, while implementing the server-side functionality with Python and Flask [<xref ref-type="bibr" rid="ref23">23</xref>].</p>
          <fig id="figure1" position="float">
            <label>Figure 1</label>
            <caption>
              <p>An illustration of the proposed AI-CAD system. (A) AI probability panel, (B) AI evidence heatmap panel, and (C) status panel. AI: artificial intelligence; AI-CAD: artificial intelligence–based computer-aided diagnosis; CSC: central serous chorioretinopathy; M: male.</p>
            </caption>
            <graphic xlink:href="jmir_v25i1e48142_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>The AI probability panel displays the probability score for each retinal disease (acute or chronic CSC). These scores are generated from the last fully connected layer of the proposed deep learning model using the softmax activation function and allow users to measure the confidence of the AI model with its decision. The probabilities are illustrated with progress bars to enable users to intuitively perceive the model’s confidence.</p>
          <p>The evidence heatmap panel reveals important regions in the OCT image while the model classifies the target label (eg, acute or chronic CSC). Gradient-weighted class activation mapping was adopted to highlight the important regions [<xref ref-type="bibr" rid="ref24">24</xref>]. The activated regions were calculated using the feature-map gradients of the convolutional neural network (CNN) layer. The heatmap highlights the area of the image wherein the proposed model was used for classification. Moreover, users can zoom in or zoom out of the OCT images in the panel to observe the details of the pathologic regions.</p>
          <p>The status panel displays the patient information of the current sample. Patient information included identification number, sex, and age. Users were able to identify the demographic information of a patient while analyzing a given image.</p>
        </sec>
        <sec>
          <title>AI-CAD System CSC-Subtype Detection Model</title>
          <p>To automatically classify a given OCT image into 2 different CSC subtypes, we use a CNN-based architecture, VGG-16 [<xref ref-type="bibr" rid="ref25">25</xref>]. The convolutional filters in CNN layers learn local patterns such as edges and textures, which is crucial for image recognition. Although other well-known CNN architectures, including VGG-19 and Resnet-50 [<xref ref-type="bibr" rid="ref26">26</xref>], have been used previously, VGG-16 was selected in this study as it outperforms the others in our validation set. The proposed model uses spectral domain OCT (SD-OCT) images as input and predicts 1 of the 2 subtypes, that is, acute or chronic CSC. The detailed architecture of the proposed model is illustrated in <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref>.</p>
          <p>To train and evaluate the AI model, the data were randomly split into the training, validation, and test sets in an 8:1:1 ratio. The validation set was exclusively used to tune the hyperparameters of the model, and the test set was singularly used to evaluate the final performance of the model. We trained the proposed model using batch sizes of 64 and 30 epochs and Adam optimization [<xref ref-type="bibr" rid="ref27">27</xref>] (learning rate: 0.0002). Moreover, we leverage the transfer learning method to avoid overfitting. The details of transfer learning and data set construction, including collection, labeling, and preprocessing, are described in <xref ref-type="supplementary-material" rid="app3">Multimedia Appendix 3</xref>.</p>
        </sec>
      </sec>
      <sec>
        <title>Observer Performance Test</title>
        <p>To investigate whether each component of the proposed system can assist in improving the diagnostic performance of ophthalmologists, a web-based experiment was conducted in which each participant was instructed to classify CSC subtypes from a given SD-OCT image. The experimental procedure comprised 3 steps (<xref rid="figure2" ref-type="fig">Figure 2</xref>). In the first step, observers had to identify the possible CSC subtype based on the SD-OCT image. The observers diagnosed retinal disease without artificial intelligence assistance (ie, <italic>No AI</italic>). In the subsequent step, an AI probability panel was provided to the observers (ie, artificial intelligence assistance with a probability score [<italic>AI Prob</italic>]). The AI probability panel shows the probability score of each retinal disease (acute or chronic). At the end of the step, both the AI probability panel and AI evidence heatmap panel were added to the system to provide a visual explanation to the observers (ie, artificial intelligence assistance with a probability score and visual evidence heatmap [<italic>AI Prob+Evid</italic>]).</p>
        <p>In each step, all observers had to determine whether the given OCT image reflected acute or chronic CSC by selecting a button on the web system. The same OCT image was used in the 3 steps. As 100 SD-OCT images were used in our experiment, each participant assessed 300 cases (ie, 3 steps × 100 images) in total. The 100 images were randomly extracted from the test set that was not used to train our model. The step-by-step user interface for the observer performance test is illustrated in <xref ref-type="supplementary-material" rid="app4">Multimedia Appendix 4</xref>.</p>
        <p>The study recruited 66 participants, including 36 retina and 30 nonretina specialists. The retina specialists were medical doctors who had completed 1-2 years of the retina fellowship training program. In contrast, nonretina specialists were board-certified ophthalmologists who were not specialized in the retina. The detailed information of the 66 participants is summarized in <xref ref-type="supplementary-material" rid="app5">Multimedia Appendix 5</xref>.</p>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>An illustration of the experimental procedure. In total, 66 ophthalmologists participated in the experiment. All observers were instructed to select one of the possible retinal diseases after reviewing the given SD-OCT image. In step 1, only the SD-OCT image was provided without any AI advice (ie, No AI). In step 2, the decision probability provided by AI was displayed on the system (ie, AI Prob). Finally, in step 3, the visual evidence of the AI decision was provided in addition to the AI probability panel (ie, AI Prob+Evid). The AI model was trained, validated, and tested using 1389, 141, and 163 images, respectively. The experiment was conducted using 100 images randomly sampled from the test set. AI: artificial intelligence; AI Prob+Evid: artificial intelligence assistance with a probability score and visual evidence heatmap; AI Prob: artificial intelligence assistance with a probability score; No AI: without artificial intelligence assistance; SD-OCT: spectral domain optical coherence tomography.</p>
          </caption>
          <graphic xlink:href="jmir_v25i1e48142_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Statistical Analysis</title>
        <p>Receiver operating characteristic analysis was conducted to evaluate the performance of the proposed model and ophthalmologists in classifying the CSC subtypes. Thereafter, the receiver operating characteristic curve with the true-positive and false-positive rates was plotted to measure the area under the receiver operating characteristic curve (AUROC) score. Friedman [<xref ref-type="bibr" rid="ref28">28</xref>] test, followed by the Wilcoxon signed-rank test, was used to quantify the differences among the 3 different conditions (ie, <italic>No AI</italic>, <italic>AI Prob</italic>, and <italic>AI Prob+Evid</italic>) [<xref ref-type="bibr" rid="ref29">29</xref>]. A 2-tailed <italic>t</italic> test was used to compare diagnostic performance between the nonretina and retina specialists.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>CSC-Subtype Detection Model Performance</title>
        <p>The proposed model exhibited high accuracy, sensitivity, and specificity values of 96.3%, 97.1%, and 95.7%, respectively (n=163). The model achieved an AUROC of 98.4% (n=163), which outperforms Resnet-50 (n=163; AUROC 87.9%) and VGG-19 (n=163; AUROC 96.1%). The model failed to accurately predict only 6 cases in our test set. Importantly, the training AUROC of 95.6% (n=163) indicates that our model strikes a balance between avoiding overfitting and underfitting, further affirming its reliability.</p>
      </sec>
      <sec>
        <title>Performance Comparison Between the AI-CAD System and Ophthalmologists</title>
        <p>The diagnostic performance of the AI-CAD system was compared with that of ophthalmologists. The AUROCs were calculated to evaluate the AI-CAD and human predictive abilities for 100 images randomly extracted from the test set. The AI-CAD system (AUROC 99.5%; n=100) outperformed both retina (AUROC 92.1%; n=36) and nonretina (AUROC 87.8%; n=30) specialists.</p>
      </sec>
      <sec>
        <title>Diagnostic Performance of Ophthalmologists</title>
        <p>The retinal-disease detection performance of the 36 retina and 30 nonretina specialists were evaluated under 3 different conditions (ie, <italic>No AI</italic>, <italic>AI Prob,</italic> and <italic>AI Prob+Evid</italic>; <xref ref-type="table" rid="table1">Table 1</xref>). The results of the Friedman test followed by the Wilcoxon signed-rank test revealed significant differences in diagnostic performance among the 3 different conditions for retina (statistic=59.5, <italic>df</italic>=2; <italic>P</italic>&#60;.001) and nonretina (statistic=44.4, <italic>df</italic>=2; <italic>P</italic>&#60;.001) specialists. In particular, the retina specialists who were provided with the AI probability panel and AI evidence heatmap panel (ie, <italic>AI Prob+Evid</italic>) achieved the highest mean diagnostic performance (AUROC 95.8%, 95% CI 0.948-0.969; n=36) compared with those subjected to other conditions (<italic>No AI</italic>: 0.921, 95% CI 0.907-0.935; <italic>P</italic>&#60;.001; and <italic>AI Prob</italic>: 0.956, 95% CI 0.946-0.967; <italic>P</italic>&#60;.05). The nonretina specialists also displayed their best performance (0.929, 95% CI 0.913-0.946) with numerical and visual information compared with when they were subjected to other conditions (<italic>No AI</italic>: 0.878, 95% CI 0.860-0.895; <italic>P</italic>&#60;.001; and <italic>AI Prob</italic>: 0.922, 95% CI 0.905-0.940; <italic>P</italic>&#60;.001).</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Diagnostic performance of retina and nonretina specialists.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="0"/>
            <col width="170"/>
            <col width="0"/>
            <col width="200"/>
            <col width="0"/>
            <col width="200"/>
            <col width="0"/>
            <col width="200"/>
            <col width="0"/>
            <col width="200"/>
            <thead>
              <tr valign="top">
                <td colspan="3">Step and observer group</td>
                <td colspan="2">AUROC<sup>a</sup> (95% CI)</td>
                <td colspan="2">Sensitivity (95% CI)</td>
                <td colspan="2">Specificity (95% CI)</td>
                <td colspan="2">Accuracy (95% CI)</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="11">
                  <bold>Step 1: <italic>No AI</italic><sup>b</sup></bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Nonretina specialist</td>
                <td colspan="2">0.878 (0.860-0.895)</td>
                <td colspan="2">0.934 (0.910-0.958)</td>
                <td colspan="2">0.821 (0.794-0.848)</td>
                <td>0.859 (0.840-0.878)</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Retina specialist</td>
                <td colspan="2">0.921 (0.907-0.935)</td>
                <td colspan="2">0.930 (0.905-0.954)</td>
                <td colspan="2">0.912 (0.890-0.933)</td>
                <td>0.918 (0.904-0.932)</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Both</td>
                <td colspan="2">0.901 (0.889-0.913)</td>
                <td colspan="2">0.932 (0.915-0.949)</td>
                <td colspan="2">0.870 (0.850-0.890)</td>
                <td>0.891 (0.878-0.905)</td>
              </tr>
              <tr valign="top">
                <td colspan="11">
                  <bold>Step 2: <italic>AI Prob</italic><sup>c</sup></bold>
                </td>
              </tr>
              <tr valign="top">
                <td colspan="2">
                  <break/>
                </td>
                <td colspan="2">Nonretina specialist</td>
                <td colspan="2">0.922 (0.905-0.940)</td>
                <td colspan="2">0.977 (0.966-0.989)</td>
                <td colspan="2">0.867 (0.837-0.897)</td>
                <td>0.905 (0.884-0.926)</td>
              </tr>
              <tr valign="top">
                <td colspan="2">
                  <break/>
                </td>
                <td colspan="2">Retina specialist</td>
                <td colspan="2">0.956 (0.946-0.966)</td>
                <td colspan="2">0.969 (0.954-0.984)</td>
                <td colspan="2">0.943 (0.924-0.962)</td>
                <td>0.952 (0.940-0.964)</td>
              </tr>
              <tr valign="top">
                <td colspan="2">
                  <break/>
                </td>
                <td colspan="2">Both</td>
                <td colspan="2">0.941 (0.930-0.951)</td>
                <td colspan="2">0.973 (0.963-0.982)</td>
                <td colspan="2">0.909 (0.889-0.928)</td>
                <td>0.930 (0.918-0.943)</td>
              </tr>
              <tr valign="top">
                <td colspan="11">
                  <bold>Step 3: <italic>AI Prob+Evid</italic><sup>d</sup></bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Nonretina specialist</td>
                <td colspan="2">0.929 (0.913-0.946)</td>
                <td colspan="2">0.982 (0.974-0.990)</td>
                <td colspan="2">0.876 (0.846-0.906)</td>
                <td>0.912 (0.892-0.933)</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Retina specialist</td>
                <td colspan="2">0.958 (0.948-0.969)</td>
                <td colspan="2">0.971 (0.956-0.987)</td>
                <td colspan="2">0.945 (0.926-0.964)</td>
                <td>0.954 (0.942-0.966)</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Both</td>
                <td colspan="2">0.945 (0.935-0.955)</td>
                <td colspan="2">0.976 (0.967-0.985)</td>
                <td colspan="2">0.914 (0.895-0.933)</td>
                <td>0.935 (0.923-0.947)</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table1fn1">
              <p><sup>a</sup>AUROC: area under the receiver operating characteristic curve.</p>
            </fn>
            <fn id="table1fn2">
              <p><sup>b</sup><italic>No AI</italic>: without artificial intelligence assistance.</p>
            </fn>
            <fn id="table1fn3">
              <p><sup>c</sup><italic>AI Prob</italic>: artificial intelligence assistance with a probability score.</p>
            </fn>
            <fn id="table1fn4">
              <p><sup>d</sup><italic>AI Prob+Evid</italic>: artificial intelligence assistance with a probability score and visual evidence heatmap.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
      <sec>
        <title>Benefits of the AI-CAD System in CSC-Subtype Classification</title>
        <p>This study investigated how the AI-CAD system can help nonretina specialists detect retinal diseases. The <italic>t</italic> test results revealed no significant differences in AUROC between the nonretina specialists supported by the AI-CAD system (<italic>AI Prob</italic>: 0.922, 95% CI 0.905-0.940; <italic>P</italic>=.88; and <italic>AI Prob+Evid</italic>: 0.929, 95% CI 0.913-0.946; <italic>P</italic>=.42) and retina specialists not supported by the AI-CAD system (<italic>No AI</italic>: 0.921, 95% CI 0.907-0.935). This finding demonstrates that nonretina specialists can achieve expert-level diagnostic performance with the support of the proposed AI-CAD system (<xref rid="figure3" ref-type="fig">Figure 3</xref>).</p>
        <fig id="figure3" position="float">
          <label>Figure 3</label>
          <caption>
            <p>Comparison of the diagnostic performance of retina and nonretina specialists. The <italic>t</italic> test results revealed no significant difference in AUROC between nonretina specialists supported by AI-CAD (AI Prob: 0.922, 95% CI 0.905 to 0.940; <italic>P</italic>=.88 or AI Prob+Evid: 0.929, 95% CI 0.913 to 0.946; <italic>P</italic>=.42) and retina specialists with no AI support (No AI: 0.921, 95% CI 0.907 to 0.935). This implies that nonretina specialists can achieve expert-level performance with the proposed AI-CAD system. AI: artificial intelligence; AI Prob+Evid: artificial intelligence assistance with a probability score and visual evidence heatmap; AI Prob: artificial intelligence assistance with a probability score; AI-CAD: artificial intelligence–based computer-aided diagnosis; AUROC: area under the receiver operating characteristic curve; No AI: without artificial intelligence assistance.</p>
          </caption>
          <graphic xlink:href="jmir_v25i1e48142_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Changes in Clinical Diagnosis With the Support of the AI-CAD System</title>
        <p>To evaluate the proposed system’s positive effect in assisting ophthalmologists, the number of positive (ie, false negative to true positive and false positive to true negative) and negative (ie, true positive to false negative and true negative to false positive) changes that could be observed between the <italic>No AI</italic> and <italic>AI Prob+Evid</italic> conditions was recorded. Chronic CSC was set as the positive class. Overall, 42.8% (307/718) of the misclassified cases under the <italic>No AI</italic> condition were accurately classified in the <italic>AI Prob+Evid</italic> condition (<xref ref-type="table" rid="table2">Table 2</xref>). In particular, 106 false-negative cases turned into true positives after using the proposed AI-CAD system, implying that the proposed system is useful for ophthalmologists in distinguishing between acute and chronic CSC.</p>
        <table-wrap position="float" id="table2">
          <label>Table 2</label>
          <caption>
            <p>Clinical diagnosis changes after AI-CAD<sup>a</sup> use.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="240"/>
            <col width="120"/>
            <col width="120"/>
            <col width="0"/>
            <col width="140"/>
            <col width="0"/>
            <col width="130"/>
            <col width="120"/>
            <col width="0"/>
            <col width="130"/>
            <thead>
              <tr valign="top">
                <td>Observer group</td>
                <td colspan="9">Clinical diagnosis</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td colspan="3">Positively changed by AI<sup>b</sup></td>
                <td colspan="2">Sum of positive changes</td>
                <td colspan="3">Negatively changed by AI</td>
                <td>Sum of negative changes</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>FN<sup>c</sup> to TP<sup>d</sup></td>
                <td>FP<sup>e</sup> to TN<sup>f</sup></td>
                <td colspan="2">
                  <break/>
                </td>
                <td colspan="2">TP to FN</td>
                <td>TN to FP</td>
                <td colspan="2">
                  <break/>
                </td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Nonretina specialist, n</td>
                <td>52</td>
                <td>115</td>
                <td colspan="2">167</td>
                <td colspan="2">3</td>
                <td>5</td>
                <td colspan="2">8</td>
              </tr>
              <tr valign="top">
                <td>Retina specialist, n</td>
                <td>54</td>
                <td>86</td>
                <td colspan="2">140</td>
                <td colspan="2">3</td>
                <td>6</td>
                <td colspan="2">9</td>
              </tr>
              <tr valign="top">
                <td>All, n</td>
                <td>106</td>
                <td>201</td>
                <td colspan="2">307</td>
                <td colspan="2">6</td>
                <td>11</td>
                <td colspan="2">17</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table2fn1">
              <p><sup>a</sup>AI-CAD: artificial intelligence–based computer-aided diagnosis system.</p>
            </fn>
            <fn id="table2fn2">
              <p><sup>b</sup>AI: artificial intelligence.</p>
            </fn>
            <fn id="table2fn3">
              <p><sup>c</sup>FN: false negative.</p>
            </fn>
            <fn id="table2fn4">
              <p><sup>d</sup>TP: true positive.</p>
            </fn>
            <fn id="table2fn5">
              <p><sup>e</sup>FP: false positive.</p>
            </fn>
            <fn id="table2fn6">
              <p><sup>f</sup>TN: true negative.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Findings</title>
        <p>This study proposed the development of an AI-CAD system to assist ophthalmologists in distinguishing chronic from acute CSC. In particular, the proposed system provides (1) the probability of retinal disease and (2) visual evidence to effectively assist ophthalmologists in their clinical decisions. To evaluate the effectiveness of the proposed AI-CAD system in enhancing ophthalmologists’ clinical decision-making, a within-subject user study involving 66 ophthalmologists was conducted. The extensive experiments demonstrated that the proposed AI-CAD system effectively assists ophthalmologists in improving their diagnostic performance for retinal disease.</p>
        <p>The proposed deep neural network in the AI-CAD system achieved a high retinal-disease detection performance (AUROC 99.5%; n=100), outperforming all 66 ophthalmologists who participated in the experiment. The high performance of the proposed AI-CAD model implies that it can lessen the heavy workloads and reduce potential errors by clinicians [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref9">9</xref>]. The quality of clinical services can be improved by using a deep learning model that guarantees consistent and high-level detection performance for retinal disease.</p>
        <p>The observer performance test revealed that the proposed AI-CAD system can effectively help ophthalmologists diagnose retinal disease. The experimental results revealed that the diagnostic performance of the retina specialists and nonretina specialists increased by 3.5% (n=36) and 4.4% (n=30), respectively (<xref ref-type="table" rid="table1">Table 1</xref>). This signifies that the proposed system successfully improved the ability of ophthalmologists to detect retinal disease. In particular, the performance difference between cases with and without the AI-CAD system was higher in the nonretina specialist group (n=30; 4.4%) than that in the retina specialist group (n=36; 3.5%), implying that the nonretina specialist group tended to receive support more than the retina specialist group. This finding is consistent with that of a prior study, which revealed that relatively less-skilled physicians exhibited greater improvement in the detection of pulmonary disease compared with that of skilled physicians [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref7">7</xref>]. Moreover, ophthalmologists achieved an even higher AUROC (retina specialists: 0.958; nonretina specialists: 0.929) if AI diagnosis information was availed with its visual explanation (gradient-weighted class activation mapping). This indicates that providing probability scores with a visual explanation is more useful for ophthalmologists than simply displaying the probability scores alone, thereby exhibiting consistency with prior work that revealed the usefulness of visual modality in detecting diseases [<xref ref-type="bibr" rid="ref7">7</xref>].</p>
      </sec>
      <sec>
        <title>Practical Issues</title>
        <p>Deploying a clinical decision support system (CDSS) in real-world health care settings presents a set of practical challenges. Chief among these concerns is the system’s susceptibility to errors, which can undermine trust in AI-driven solutions. To address this, a dynamic training model becomes indispensable. In the realm of academia, researchers have embraced the “Human-In-the-Loop” paradigm to tackle this issue [<xref ref-type="bibr" rid="ref30">30</xref>-<xref ref-type="bibr" rid="ref32">32</xref>]. This approach involves the seamless integration of human oversight and intervention into the CDSS’s decision-making processes. By empowering human experts to review and amend the system's outputs, we expedite the identification and rectification of errors. Consequently, this iterative feedback mechanism bolsters the CDSS's trustworthiness and reliability in real-world applications, bringing it into closer alignment with user expectations and requirements.</p>
        <p>Another pivotal concern pertains to the system's security. Safeguarding patient data and upholding the integrity of the CDSS is paramount within the health care domain. This demands the implementation of robust encryption, stringent access controls, and regular security audits to protect sensitive information and prevent unauthorized access or data breaches. Furthermore, continuous monitoring and timely updates to the CDSS are essential for addressing emerging security threats and vulnerabilities, ensuring a high level of security in real-world applications.</p>
        <p>By concurrently addressing error mitigation and security, CDSS developers and health care professionals can collaborate in creating a more dependable and trustworthy system that serves the best interests of both patients and medical practitioners.</p>
      </sec>
      <sec>
        <title>Limitations</title>
        <p>This study has some limitations. First, all images were acquired from a single OCT device located at a single academic center. Although the data set was sufficient to train and validate the proposed model for distinguishing between CSC subtypes, external validation with a different center is needed. Second, experiments were conducted using the web-based AI-CAD system developed in this study. Thus, the environment was relatively different from that of actual clinical practice. However, we attempted to design and develop a user-friendly AI-CAD system under the supervision of retina specialists. Third, considering that the model's training data comprises images taken exclusively by the Heidelberg Spectralis device (Heidelberg Engineering Inc), its performance might be insufficient when dealing with images from different devices. Future studies should prioritize (1) extending the proposed AI-CAD system to other retinal diseases, such as AMD and diabetic retinopathy; (2) developing strategies to improve the reliability of doctors when using the AI-CAD system; and (3) exploring the application of transfer learning techniques to address the challenges arising from variations in devices.</p>
      </sec>
      <sec>
        <title>Comparison With Prior Work</title>
        <p>This study has several implications. First, to the best of our knowledge, this study is the first attempt to develop and evaluate an AI-CAD system for the detection of retinal disease using OCT. Prior studies have developed AI-CAD systems for the detection of pulmonary disease and evaluated their effectiveness [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref33">33</xref>,<xref ref-type="bibr" rid="ref34">34</xref>]. However, minimal attention has been focused on the application of an AI-CAD system for the diagnosis of retinal diseases, such as CSC and AMD. In this study, an AI-CAD system that can assist ophthalmologists in identifying retinal diseases was developed and its usefulness in detecting retinal disease was evaluated.</p>
        <p>Second, the proposed AI-CAD system is potentially useful for small or local medical care centers where retina specialists are unavailable. Unlike in large-scale medical care centers or hospitals, retina specialists are rarely found in small or local centers. Diagnosing subtypes of retinal diseases (ie, acute vs chronic CSC [<xref ref-type="bibr" rid="ref8">8</xref>] and polypoidal choroidal vasculopathy vs retinal angiomatous proliferation [<xref ref-type="bibr" rid="ref10">10</xref>]) requires more elaborate expertise than simply screening abnormal cases (ie, normal vs CSC [<xref ref-type="bibr" rid="ref8">8</xref>]), and the proposed AI-CAD system exhibits higher performance than that of retina specialists with over 10 years of experience, implying that the proposed system potentially plays an important role in such cases. In CSC, assessing the chronicity of the disease at the time of diagnosis is crucial for selecting an appropriate course of treatment or forecasting its prognosis [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref22">22</xref>]. Acute CSC typically follows a self-limiting natural course, whereas chronic CSC with or without sustained SRF may be associated with irreversible vision loss or may require active intervention, such as intravitreal antivascular endothelial growth factor injections or photodynamic therapy, all of which are intended to prevent long-term visual loss that can lower the patient’s quality of life. Further, on comparing diagnostic performance between nonretina and retina specialists, the experimental results of this study demonstrate that nonretina specialists can achieve retina specialist-level performance with the support of the proposed AI-CAD system (<xref rid="figure3" ref-type="fig">Figure 3</xref>). 
This implies that the proposed system can alleviate the heavy workload of ophthalmologists who have expert-level diagnostic performance and facilitate the decision-making process of less-skilled ophthalmologists (nonretina specialists) by improving their diagnostic performance.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>To the best of our knowledge, this study is the first attempt to design, develop, and evaluate an AI-CAD system for the detection of retinal disease using OCT. First, an AI-CAD system was developed with a high-performance deep learning model. Thereafter, an observer performance test was conducted with the proposed system to determine the ability of the system to assist ophthalmologists in diagnosing retinal diseases. The results indicated that the proposed AI-CAD system can provide retinal expert-level diagnostic performance and help ophthalmologists improve their diagnostic performance in detecting CSC subtypes. Thus, the proposed AI-CAD system can alleviate the heavy workload of ophthalmologists and help in the decision-making process involved in detecting CSC subtypes. As a base study, this study demonstrates the usefulness and effectiveness of using an AI-CAD system in detecting retinal diseases, particularly CSC subtypes. In the future, the proposed AI-CAD system may be easily extended to the detection of other retinal diseases, such as AMD, diabetic retinopathy, and branch retinal vein occlusion.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>Representative cases of acute and chronic central serous chorioretinopathy.</p>
        <media xlink:href="jmir_v25i1e48142_app1.docx" xlink:title="DOCX File , 204 KB"/>
      </supplementary-material>
      <supplementary-material id="app2">
        <label>Multimedia Appendix 2</label>
        <p>An illustration of the proposed deep learning model based on VGG-16 architecture.</p>
        <media xlink:href="jmir_v25i1e48142_app2.docx" xlink:title="DOCX File , 247 KB"/>
      </supplementary-material>
      <supplementary-material id="app3">
        <label>Multimedia Appendix 3</label>
        <p>Method details.</p>
        <media xlink:href="jmir_v25i1e48142_app3.docx" xlink:title="DOCX File , 17 KB"/>
      </supplementary-material>
      <supplementary-material id="app4">
        <label>Multimedia Appendix 4</label>
        <p>A step-by-step illustration of the observer performance test.</p>
        <media xlink:href="jmir_v25i1e48142_app4.docx" xlink:title="DOCX File , 1741 KB"/>
      </supplementary-material>
      <supplementary-material id="app5">
        <label>Multimedia Appendix 5</label>
        <p>Information and test results of the ophthalmologists who participated in the observer performance test.</p>
        <media xlink:href="jmir_v25i1e48142_app5.docx" xlink:title="DOCX File , 20 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AI-CAD</term>
          <def>
            <p>artificial intelligence–based computer-aided diagnosis</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">AI</term>
          <def>
            <p>artificial intelligence</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">AI Prob+Evid</term>
          <def>
            <p>artificial intelligence assistance with a probability score and visual evidence heatmap</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">AI Prob</term>
          <def>
            <p>artificial intelligence assistance with a probability score</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">AMD</term>
          <def>
            <p>age-related macular degeneration</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">AUROC</term>
          <def>
            <p>area under the receiver operating characteristic curve</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">CAD</term>
          <def>
            <p>computer-aided diagnosis</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb8">CDSS</term>
          <def>
            <p>clinical decision support system</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb9">CNN</term>
          <def>
            <p>convolutional neural network</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb10">CSC</term>
          <def>
            <p>central serous chorioretinopathy</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb11">FA</term>
          <def>
            <p>fluorescein angiography</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb12">ICGA</term>
          <def>
            <p>indocyanine green angiography</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb13">No AI</term>
          <def>
            <p>without artificial intelligence assistance</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb14">OCT</term>
          <def>
            <p>optical coherence tomography</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb15">RPE</term>
          <def>
            <p>retinal pigment epithelium</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb16">SD-OCT</term>
          <def>
            <p>spectral domain optical coherence tomography</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb17">SRF</term>
          <def>
            <p>subretinal fluid</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>The authors thank the 66 ophthalmologists who participated in our study. This work was supported in part by the National Research Foundation of Korea (NRF) grant funded by the Korea government (Ministry of Science and Information and Communications Technology) (2023R1A2C2007625) and in part by Electronics and Telecommunications Research Institute (ETRI) grant funded by the Korean government (23ZT1100, Development of Information and Communications Technology Convergence Technology based on Urban Area).</p>
    </ack>
    <notes>
      <sec>
        <title>Data Availability</title>
        <p>The data are not available for public access because of patient privacy concerns but are available from the corresponding author upon reasonable request.</p>
      </sec>
    </notes>
    <fn-group>
      <fn fn-type="con">
        <p>JY worked on the methodology, software, writing of the original draft, visualization, and investigation. JH did the methodology, writing review and editing; JK also worked on the methodology and software. SC and JIP worked on the investigation and validation. JSH worked on the data curation and validation. JMH did the data curation and validation. DDJH worked on the methodology, writing review and editing, supervision, data curation, and validation.</p>
      </fn>
      <fn fn-type="conflict">
        <p>JH and SC own stock of RAON DATA Inc. DDJH and JH own stock of Lux Mind Inc. All other authors declare no competing interests.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Doi</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Computer-aided diagnosis in medical imaging: historical review, current status and future potential</article-title>
          <source>Comput Med Imaging Graph</source>
          <year>2007</year>
          <volume>31</volume>
          <issue>4-5</issue>
          <fpage>198</fpage>
          <lpage>211</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/17349778"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.compmedimag.2007.02.002</pub-id>
          <pub-id pub-id-type="medline">17349778</pub-id>
          <pub-id pub-id-type="pii">S0895-6111(07)00026-2</pub-id>
          <pub-id pub-id-type="pmcid">PMC1955762</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chan</surname>
              <given-names>HP</given-names>
            </name>
            <name name-style="western">
              <surname>Samala</surname>
              <given-names>RK</given-names>
            </name>
            <name name-style="western">
              <surname>Hadjiiski</surname>
              <given-names>LM</given-names>
            </name>
          </person-group>
          <article-title>CAD and AI for breast cancer-recent development and challenges</article-title>
          <source>Br J Radiol</source>
          <year>2020</year>
          <volume>93</volume>
          <issue>1108</issue>
          <fpage>20190580</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.birpublications.org/doi/10.1259/bjr.20190580"/>
          </comment>
          <pub-id pub-id-type="doi">10.1259/bjr.20190580</pub-id>
          <pub-id pub-id-type="medline">31742424</pub-id>
          <pub-id pub-id-type="pmcid">PMC7362917</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Brown</surname>
              <given-names>MS</given-names>
            </name>
            <name name-style="western">
              <surname>Lo</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Goldin</surname>
              <given-names>JG</given-names>
            </name>
            <name name-style="western">
              <surname>Barnoy</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>GHJ</given-names>
            </name>
            <name name-style="western">
              <surname>McNitt-Gray</surname>
              <given-names>MF</given-names>
            </name>
            <name name-style="western">
              <surname>Aberle</surname>
              <given-names>DR</given-names>
            </name>
          </person-group>
          <article-title>Toward clinically usable CAD for lung cancer screening with computed tomography</article-title>
          <source>Eur Radiol</source>
          <year>2014</year>
          <volume>24</volume>
          <issue>11</issue>
          <fpage>2719</fpage>
          <lpage>2728</lpage>
          <pub-id pub-id-type="doi">10.1007/s00330-014-3329-0</pub-id>
          <pub-id pub-id-type="medline">25052078</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mittal</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Kaur</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Computer-aided-diagnosis in colorectal cancer: a survey of state of the art techniques</article-title>
          <year>2016</year>
          <conf-name>2016 International Conference on Inventive Computation Technologies (ICICT)</conf-name>
          <conf-date>August 26-27, 2016</conf-date>
          <conf-loc>Coimbatore, India</conf-loc>
          <fpage>1</fpage>
          <lpage>6</lpage>
          <pub-id pub-id-type="doi">10.1109/inventive.2016.7823260</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Martínez-Murcia</surname>
              <given-names>FJ</given-names>
            </name>
            <name name-style="western">
              <surname>Górriz</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Ramírez</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Puntonet</surname>
              <given-names>CG</given-names>
            </name>
            <name name-style="western">
              <surname>Salas-González</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Computer aided diagnosis tool for Alzheimer's disease based on Mann-Whitney-Wilcoxon U-Test</article-title>
          <source>Expert Syst Appl</source>
          <year>2012</year>
          <volume>39</volume>
          <issue>10</issue>
          <fpage>9676</fpage>
          <lpage>9685</lpage>
          <pub-id pub-id-type="doi">10.1016/j.eswa.2012.02.153</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>EJ</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Jin</surname>
              <given-names>KN</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Choi</surname>
              <given-names>SY</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>JH</given-names>
            </name>
            <name name-style="western">
              <surname>Goo</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Aum</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Yim</surname>
              <given-names>JJ</given-names>
            </name>
            <name name-style="western">
              <surname>Cohen</surname>
              <given-names>JG</given-names>
            </name>
            <name name-style="western">
              <surname>Ferretti</surname>
              <given-names>GR</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>CM</given-names>
            </name>
          </person-group>
          <article-title>Development and validation of a deep learning-based automated detection algorithm for major thoracic diseases on chest radiographs</article-title>
          <source>JAMA Netw Open</source>
          <year>2019</year>
          <volume>2</volume>
          <issue>3</issue>
          <fpage>e191095</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/30901052"/>
          </comment>
          <pub-id pub-id-type="doi">10.1001/jamanetworkopen.2019.1095</pub-id>
          <pub-id pub-id-type="medline">30901052</pub-id>
          <pub-id pub-id-type="pii">2728630</pub-id>
          <pub-id pub-id-type="pmcid">PMC6583308</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Choi</surname>
              <given-names>SY</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Choi</surname>
              <given-names>YR</given-names>
            </name>
            <name name-style="western">
              <surname>Jin</surname>
              <given-names>KN</given-names>
            </name>
          </person-group>
          <article-title>Evaluation of a deep learning-based computer-aided detection algorithm on chest radiographs: case-control study</article-title>
          <source>Medicine (Baltimore)</source>
          <year>2021</year>
          <volume>100</volume>
          <issue>16</issue>
          <fpage>e25663</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/33879750"/>
          </comment>
          <pub-id pub-id-type="doi">10.1097/MD.0000000000025663</pub-id>
          <pub-id pub-id-type="medline">33879750</pub-id>
          <pub-id pub-id-type="pii">00005792-202104230-00097</pub-id>
          <pub-id pub-id-type="pmcid">PMC8078463</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Yoon</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Sohn</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>KH</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>DD</given-names>
            </name>
          </person-group>
          <article-title>Optical coherence tomography-based deep-learning model for detecting central serous chorioretinopathy</article-title>
          <source>Sci Rep</source>
          <year>2020</year>
          <volume>10</volume>
          <issue>1</issue>
          <fpage>18852</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.nature.com/articles/s41598-020-75816-w"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-020-75816-w</pub-id>
          <pub-id pub-id-type="medline">33139813</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-020-75816-w</pub-id>
          <pub-id pub-id-type="pmcid">PMC7608618</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Yoon</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ko</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Choi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Jang</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Sohn</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>KH</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>DDJ</given-names>
            </name>
          </person-group>
          <article-title>Classifying central serous chorioretinopathy subtypes with a deep neural network using optical coherence tomography images: a cross-sectional study</article-title>
          <source>Sci Rep</source>
          <year>2022</year>
          <volume>12</volume>
          <issue>1</issue>
          <fpage>422</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.nature.com/articles/s41598-021-04424-z"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-021-04424-z</pub-id>
          <pub-id pub-id-type="medline">35013502</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-021-04424-z</pub-id>
          <pub-id pub-id-type="pmcid">PMC8748505</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>DDJ</given-names>
            </name>
            <name name-style="western">
              <surname>Choi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ko</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Yoon</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>HJ</given-names>
            </name>
            <name name-style="western">
              <surname>Sohn</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>KH</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Distinguishing retinal angiomatous proliferation from polypoidal choroidal vasculopathy with a deep neural network based on optical coherence tomography</article-title>
          <source>Sci Rep</source>
          <year>2021</year>
          <volume>11</volume>
          <issue>1</issue>
          <fpage>9275</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.nature.com/articles/s41598-021-88543-7"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-021-88543-7</pub-id>
          <pub-id pub-id-type="medline">33927240</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-021-88543-7</pub-id>
          <pub-id pub-id-type="pmcid">PMC8085229</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Esteva</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Kuprel</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Novoa</surname>
              <given-names>RA</given-names>
            </name>
            <name name-style="western">
              <surname>Ko</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Swetter</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Blau</surname>
              <given-names>HM</given-names>
            </name>
            <name name-style="western">
              <surname>Thrun</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Dermatologist-level classification of skin cancer with deep neural networks</article-title>
          <source>Nature</source>
          <year>2017</year>
          <volume>542</volume>
          <issue>7639</issue>
          <fpage>115</fpage>
          <lpage>118</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/28117445"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/nature21056</pub-id>
          <pub-id pub-id-type="medline">28117445</pub-id>
          <pub-id pub-id-type="pii">nature21056</pub-id>
          <pub-id pub-id-type="pmcid">PMC8382232</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Munch</surname>
              <given-names>IC</given-names>
            </name>
            <name name-style="western">
              <surname>Hasler</surname>
              <given-names>PW</given-names>
            </name>
            <name name-style="western">
              <surname>Prünte</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Larsen</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Central serous chorioretinopathy</article-title>
          <source>Acta Ophthalmol</source>
          <year>2008</year>
          <volume>86</volume>
          <issue>2</issue>
          <fpage>126</fpage>
          <lpage>145</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://onlinelibrary.wiley.com/doi/10.1111/j.1600-0420.2007.00889.x"/>
          </comment>
          <pub-id pub-id-type="doi">10.1111/j.1600-0420.2007.00889.x</pub-id>
          <pub-id pub-id-type="medline">17662099</pub-id>
          <pub-id pub-id-type="pii">AOS889</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Breukink</surname>
              <given-names>MB</given-names>
            </name>
            <name name-style="western">
              <surname>Dingemans</surname>
              <given-names>AJ</given-names>
            </name>
            <name name-style="western">
              <surname>den Hollander</surname>
              <given-names>AI</given-names>
            </name>
            <name name-style="western">
              <surname>Keunen</surname>
              <given-names>JE</given-names>
            </name>
            <name name-style="western">
              <surname>MacLaren</surname>
              <given-names>RE</given-names>
            </name>
            <name name-style="western">
              <surname>Fauser</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Querques</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Hoyng</surname>
              <given-names>CB</given-names>
            </name>
            <name name-style="western">
              <surname>Downes</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Boon</surname>
              <given-names>CJ</given-names>
            </name>
          </person-group>
          <article-title>Chronic central serous chorioretinopathy: long-term follow-up and vision-related quality of life</article-title>
          <source>Clin Ophthalmol</source>
          <year>2017</year>
          <volume>11</volume>
          <fpage>39</fpage>
          <lpage>46</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/28053499"/>
          </comment>
          <pub-id pub-id-type="doi">10.2147/OPTH.S115685</pub-id>
          <pub-id pub-id-type="medline">28053499</pub-id>
          <pub-id pub-id-type="pii">opth-11-039</pub-id>
          <pub-id pub-id-type="pmcid">PMC5189979</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sahin</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bez</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kaya</surname>
              <given-names>MC</given-names>
            </name>
            <name name-style="western">
              <surname>Türkcü</surname>
              <given-names>FM</given-names>
            </name>
            <name name-style="western">
              <surname>Sahin</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Yüksel</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Psychological distress and poor quality of life in patients with central serous chorioretinopathy</article-title>
          <source>Semin Ophthalmol</source>
          <year>2014</year>
          <volume>29</volume>
          <issue>2</issue>
          <fpage>73</fpage>
          <lpage>76</lpage>
          <pub-id pub-id-type="doi">10.3109/08820538.2013.793728</pub-id>
          <pub-id pub-id-type="medline">23758338</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>van Rijssen</surname>
              <given-names>TJ</given-names>
            </name>
            <name name-style="western">
              <surname>van Dijk</surname>
              <given-names>EHC</given-names>
            </name>
            <name name-style="western">
              <surname>Yzer</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ohno-Matsui</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Keunen</surname>
              <given-names>JEE</given-names>
            </name>
            <name name-style="western">
              <surname>Schlingemann</surname>
              <given-names>RO</given-names>
            </name>
            <name name-style="western">
              <surname>Sivaprasad</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Querques</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Downes</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Fauser</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hoyng</surname>
              <given-names>CB</given-names>
            </name>
            <name name-style="western">
              <surname>Piccolino</surname>
              <given-names>FC</given-names>
            </name>
            <name name-style="western">
              <surname>Chhablani</surname>
              <given-names>JK</given-names>
            </name>
            <name name-style="western">
              <surname>Lai</surname>
              <given-names>TYY</given-names>
            </name>
            <name name-style="western">
              <surname>Lotery</surname>
              <given-names>AJ</given-names>
            </name>
            <name name-style="western">
              <surname>Larsen</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Holz</surname>
              <given-names>FG</given-names>
            </name>
            <name name-style="western">
              <surname>Freund</surname>
              <given-names>KB</given-names>
            </name>
            <name name-style="western">
              <surname>Yannuzzi</surname>
              <given-names>LA</given-names>
            </name>
            <name name-style="western">
              <surname>Boon</surname>
              <given-names>CJF</given-names>
            </name>
          </person-group>
          <article-title>Central serous chorioretinopathy: towards an evidence-based treatment guideline</article-title>
          <source>Prog Retin Eye Res</source>
          <year>2019</year>
          <volume>73</volume>
          <fpage>100770</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S1350-9462(18)30094-6"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.preteyeres.2019.07.003</pub-id>
          <pub-id pub-id-type="medline">31319157</pub-id>
          <pub-id pub-id-type="pii">S1350-9462(18)30094-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Shinojima</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Hirose</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Mori</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Kawamura</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Yuzawa</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Morphologic findings in acute central serous chorioretinopathy using spectral domain-optical coherence tomography with simultaneous angiography</article-title>
          <source>Retina</source>
          <year>2010</year>
          <volume>30</volume>
          <issue>2</issue>
          <fpage>193</fpage>
          <lpage>202</lpage>
          <pub-id pub-id-type="doi">10.1097/IAE.0b013e3181c70203</pub-id>
          <pub-id pub-id-type="medline">20142712</pub-id>
          <pub-id pub-id-type="pii">00006982-201002000-00001</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Song</surname>
              <given-names>IS</given-names>
            </name>
            <name name-style="western">
              <surname>Shin</surname>
              <given-names>YU</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>BR</given-names>
            </name>
          </person-group>
          <article-title>Time-periodic characteristics in the morphology of idiopathic central serous chorioretinopathy evaluated by volume scan using spectral-domain optical coherence tomography</article-title>
          <source>Am J Ophthalmol</source>
          <year>2012</year>
          <volume>154</volume>
          <issue>2</issue>
          <fpage>366</fpage>
          <lpage>375.e4</lpage>
          <pub-id pub-id-type="doi">10.1016/j.ajo.2012.02.031</pub-id>
          <pub-id pub-id-type="medline">22633348</pub-id>
          <pub-id pub-id-type="pii">S0002-9394(12)00164-X</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kaye</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Chandra</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Sheth</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Boon</surname>
              <given-names>CJF</given-names>
            </name>
            <name name-style="western">
              <surname>Sivaprasad</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Lotery</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Central serous chorioretinopathy: an update on risk factors, pathophysiology and imaging modalities</article-title>
          <source>Prog Retin Eye Res</source>
          <year>2020</year>
          <volume>79</volume>
          <fpage>100865</fpage>
          <pub-id pub-id-type="doi">10.1016/j.preteyeres.2020.100865</pub-id>
          <pub-id pub-id-type="medline">32407978</pub-id>
          <pub-id pub-id-type="pii">S1350-9462(20)30037-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ko</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Yoon</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>JI</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>KH</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>DDJ</given-names>
            </name>
          </person-group>
          <article-title>Assessing central serous chorioretinopathy with deep learning and multiple optical coherence tomography images</article-title>
          <source>Sci Rep</source>
          <year>2022</year>
          <volume>12</volume>
          <issue>1</issue>
          <fpage>1831</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.nature.com/articles/s41598-022-05051-y"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-022-05051-y</pub-id>
          <pub-id pub-id-type="medline">35115577</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-022-05051-y</pub-id>
          <pub-id pub-id-type="pmcid">PMC8814130</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Montero</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Ruiz-Moreno</surname>
              <given-names>JM</given-names>
            </name>
          </person-group>
          <article-title>Optical coherence tomography characterisation of idiopathic central serous chorioretinopathy</article-title>
          <source>Br J Ophthalmol</source>
          <year>2005</year>
          <volume>89</volume>
          <issue>5</issue>
          <fpage>562</fpage>
          <lpage>564</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bjo.bmj.com/lookup/pmidlookup?view=long&#38;pmid=15834085"/>
          </comment>
          <pub-id pub-id-type="doi">10.1136/bjo.2004.049403</pub-id>
          <pub-id pub-id-type="medline">15834085</pub-id>
          <pub-id pub-id-type="pii">89/5/562</pub-id>
          <pub-id pub-id-type="pmcid">PMC1772614</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Spaide</surname>
              <given-names>RF</given-names>
            </name>
            <name name-style="western">
              <surname>Campeas</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Haas</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Yannuzzi</surname>
              <given-names>LA</given-names>
            </name>
            <name name-style="western">
              <surname>Fisher</surname>
              <given-names>YL</given-names>
            </name>
            <name name-style="western">
              <surname>Guyer</surname>
              <given-names>DR</given-names>
            </name>
            <name name-style="western">
              <surname>Slakter</surname>
              <given-names>JS</given-names>
            </name>
            <name name-style="western">
              <surname>Sorenson</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Orlock</surname>
              <given-names>DA</given-names>
            </name>
          </person-group>
          <article-title>Central serous chorioretinopathy in younger and older adults</article-title>
          <source>Ophthalmology</source>
          <year>1996</year>
          <volume>103</volume>
          <issue>12</issue>
          <fpage>2070</fpage>
          <lpage>2079</lpage>
          <comment>discussion 2079</comment>
          <pub-id pub-id-type="doi">10.1016/s0161-6420(96)30386-2</pub-id>
          <pub-id pub-id-type="medline">9003341</pub-id>
          <pub-id pub-id-type="pii">S0161-6420(96)30386-2</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Daruich</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Matet</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Dirani</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bousquet</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Zhao</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Farman</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Jaisser</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Behar-Cohen</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Central serous chorioretinopathy: recent findings and new physiopathology hypothesis</article-title>
          <source>Prog Retin Eye Res</source>
          <year>2015</year>
          <volume>48</volume>
          <fpage>82</fpage>
          <lpage>118</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S1350-9462(15)00033-6"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.preteyeres.2015.05.003</pub-id>
          <pub-id pub-id-type="medline">26026923</pub-id>
          <pub-id pub-id-type="pii">S1350-9462(15)00033-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Grinberg</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <source>Flask Web Development: Developing Web Applications with Python</source>
          <year>2018</year>
          <publisher-loc>Sebastopol, CA</publisher-loc>
          <publisher-name>O'Reilly Media, Inc</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Selvaraju</surname>
              <given-names>RR</given-names>
            </name>
            <name name-style="western">
              <surname>Cogswell</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Das</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Vedantam</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Parikh</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Batra</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Grad-CAM: visual explanations from deep networks via gradient-based localization</article-title>
          <year>2017</year>
          <conf-name>2017 IEEE International Conference on Computer Vision (ICCV)</conf-name>
          <conf-date>October 22-29, 2017</conf-date>
          <conf-loc>Venice, Italy</conf-loc>
          <fpage>618</fpage>
          <lpage>626</lpage>
          <pub-id pub-id-type="doi">10.1109/iccv.2017.74</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Simonyan</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Zisserman</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Very deep convolutional networks for large-scale image recognition</article-title>
          <year>2015</year>
          <conf-name>International Conference on Learning Representations</conf-name>
          <conf-date>May 7-9, 2015</conf-date>
          <conf-loc>San Diego, CA, USA</conf-loc>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://arxiv.org/abs/1409.1556"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>He</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Ren</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Deep residual learning for image recognition</article-title>
          <year>2016</year>
          <conf-name>2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)</conf-name>
          <conf-date>June 27-30, 2016</conf-date>
          <conf-loc>Las Vegas, NV, USA</conf-loc>
          <fpage>770</fpage>
          <lpage>778</lpage>
          <pub-id pub-id-type="doi">10.1109/cvpr.2016.90</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kingma</surname>
              <given-names>DP</given-names>
            </name>
            <name name-style="western">
              <surname>Ba</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Adam: a method for stochastic optimization</article-title>
          <source>Proceedings of the 3rd International Conference on Learning Representations</source>
          <year>2015</year>
          <conf-name>International Conference on Learning Representations</conf-name>
          <conf-date>May 7-9, 2015</conf-date>
          <conf-loc>San Diego, CA, USA</conf-loc>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://arxiv.org/abs/1412.6980"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Friedman</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>The use of ranks to avoid the assumption of normality implicit in the analysis of variance</article-title>
          <source>J Am Stat Assoc</source>
          <year>1937</year>
          <volume>32</volume>
          <issue>200</issue>
          <fpage>675</fpage>
          <lpage>701</lpage>
          <pub-id pub-id-type="doi">10.1080/01621459.1937.10503522</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wilcoxon</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Kotz</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Johnson</surname>
              <given-names>NL</given-names>
            </name>
          </person-group>
          <article-title>Individual comparisons by ranking methods</article-title>
          <source>Springer Series in Statistics: Breakthroughs in Statistics</source>
          <year>1992</year>
          <publisher-loc>New York, NY</publisher-loc>
          <publisher-name>Springer</publisher-name>
          <fpage>196</fpage>
          <lpage>202</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Xiao</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ma</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>He</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>A survey of human-in-the-loop for machine learning</article-title>
          <source>Future Gener Comput Syst</source>
          <year>2022</year>
          <volume>135</volume>
          <fpage>364</fpage>
          <lpage>381</lpage>
          <pub-id pub-id-type="doi">10.1016/j.future.2022.05.014</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sorantin</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Grasser</surname>
              <given-names>MG</given-names>
            </name>
            <name name-style="western">
              <surname>Hemmelmayr</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Tschauner</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hrzic</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Weiss</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Lacekova</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Holzinger</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>The augmented radiologist: artificial intelligence in the practice of radiology</article-title>
          <source>Pediatr Radiol</source>
          <year>2022</year>
          <volume>52</volume>
          <issue>11</issue>
          <fpage>2074</fpage>
          <lpage>2086</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/34664088"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s00247-021-05177-7</pub-id>
          <pub-id pub-id-type="medline">34664088</pub-id>
          <pub-id pub-id-type="pii">10.1007/s00247-021-05177-7</pub-id>
          <pub-id pub-id-type="pmcid">PMC9537212</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Le</surname>
              <given-names>EPV</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Hickman</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Gilbert</surname>
              <given-names>FJ</given-names>
            </name>
          </person-group>
          <article-title>Artificial intelligence in breast imaging</article-title>
          <source>Clin Radiol</source>
          <year>2019</year>
          <volume>74</volume>
          <issue>5</issue>
          <fpage>357</fpage>
          <lpage>366</lpage>
          <pub-id pub-id-type="doi">10.1016/j.crad.2019.02.006</pub-id>
          <pub-id pub-id-type="medline">30898381</pub-id>
          <pub-id pub-id-type="pii">S0009-9260(19)30116-3</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Nam</surname>
              <given-names>JG</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>EJ</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>JH</given-names>
            </name>
            <name name-style="western">
              <surname>Jin</surname>
              <given-names>KN</given-names>
            </name>
            <name name-style="western">
              <surname>Lim</surname>
              <given-names>KY</given-names>
            </name>
            <name name-style="western">
              <surname>Vu</surname>
              <given-names>TH</given-names>
            </name>
            <name name-style="western">
              <surname>Sohn</surname>
              <given-names>JH</given-names>
            </name>
            <name name-style="western">
              <surname>Hwang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Goo</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>CM</given-names>
            </name>
          </person-group>
          <article-title>Development and validation of deep learning-based automatic detection algorithm for malignant pulmonary nodules on chest radiographs</article-title>
          <source>Radiology</source>
          <year>2019</year>
          <volume>290</volume>
          <issue>1</issue>
          <fpage>218</fpage>
          <lpage>228</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://pubs.rsna.org/doi/10.1148/radiol.2018180237"/>
          </comment>
          <pub-id pub-id-type="doi">10.1148/radiol.2018180237</pub-id>
          <pub-id pub-id-type="medline">30251934</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sung</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Bae</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Park</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Jung</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Seo</surname>
              <given-names>JB</given-names>
            </name>
            <name name-style="western">
              <surname>Jung</surname>
              <given-names>KH</given-names>
            </name>
          </person-group>
          <article-title>Added value of deep learning-based detection system for multiple major findings on chest radiographs: a randomized crossover study</article-title>
          <source>Radiology</source>
          <year>2021</year>
          <volume>299</volume>
          <issue>2</issue>
          <fpage>450</fpage>
          <lpage>459</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://pubs.rsna.org/doi/10.1148/radiol.2021202818"/>
          </comment>
          <pub-id pub-id-type="doi">10.1148/radiol.2021202818</pub-id>
          <pub-id pub-id-type="medline">33754828</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
