
Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax

Mikhail Ivanenko, Waldemar T. Smolik, Damian Wanta, Mateusz Midura, Przemysław Wróblewski, Xiaohan Hou and Xiaoheng Yan

Sensors 2023, 23(18), 7774; https://doi.org/10.3390/s23187774
Published: 9 September 2023

Abstract

Electrical impedance tomography (EIT) is a non-invasive technique for visualizing the internal structure of the human body. Capacitively coupled electrical impedance tomography (CCEIT) is a new contactless EIT technique that can potentially be used as a wearable device. Recent studies have shown that machine-learning-based approaches are very promising for EIT image reconstruction. Most of these studies concern models with up to 22 electrodes and focus on different artificial neural network architectures, from simple shallow networks to complex convolutional networks. However, the use of convolutional networks for image reconstruction with a higher number of electrodes requires further investigation. In this work, two neural network architectures were used for CCEIT image reconstruction: a fully connected deep neural network and a conditional generative adversarial network (cGAN). The training dataset was generated by numerical simulation of a thorax phantom with healthy and illness-affected lungs. Three illnesses (pneumothorax, pleural effusion, and hydropneumothorax) were modeled using the electrical properties of the tissues. The thorax phantom included the heart, aorta, spine, and lungs. A sensor with 32 area electrodes was used in the numerical model. The custom-designed ECTsim toolbox for MATLAB was used to solve the forward problem and simulate the measurements. Two artificial neural networks were trained with supervision for image reconstruction. Reconstruction quality was compared between these networks and one-step algebraic reconstruction methods, namely linear back projection and pseudoinverse with Tikhonov regularization. The evaluation was based on pixel-to-pixel metrics: root-mean-square error, structural similarity index, 2D correlation coefficient, and peak signal-to-noise ratio. Additionally, the diagnostic value, measured by the ROC AUC metric, was used to assess image quality. The results showed that obtaining information about regional lung function (regions affected by pneumothorax or pleural effusion) is possible using image reconstruction based on supervised learning and deep neural networks in EIT. The results obtained using the cGAN were markedly better than those obtained using the fully connected network, especially for noisy measurement data. However, the estimation of diagnostic value showed that even the algebraic methods yield satisfactory results.

Keywords: capacitively coupled electrical impedance tomography; image reconstruction; inverse problem; deep neural networks; deep learning; fully connected neural networks; cGAN; medical imaging; lung imaging; pneumothorax; pleural effusion
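For context, the one-step algebraic baselines named in the abstract (linear back projection and pseudoinverse with Tikhonov regularization) reduce to a single matrix operation applied to the measurement vector under the usual linearized model y ≈ Sx, where S is a sensitivity (Jacobian) matrix. The sketch below illustrates that general form only; the matrix sizes, variable names, and regularization value are illustrative assumptions, not taken from the paper.

```python
import numpy as np

def tikhonov_operator(S, lam=1e-3):
    """One-step reconstruction matrix (S^T S + lam I)^(-1) S^T for y ~ S x."""
    n = S.shape[1]
    return np.linalg.solve(S.T @ S + lam * np.eye(n), S.T)

def linear_back_projection(S, y):
    """Back projection: S^T applied to y, normalized by per-pixel sensitivity."""
    x = S.T @ y
    col_sens = np.abs(S).sum(axis=0)          # total sensitivity of each pixel
    return x / np.maximum(col_sens, 1e-12)

# Illustrative sizes: 32 electrodes give 32*31/2 = 496 independent electrode-pair
# measurements; a hypothetical 64 x 64 image grid gives 4096 unknown pixels.
rng = np.random.default_rng(0)
S = rng.standard_normal((496, 4096))          # placeholder sensitivity matrix
x_true = rng.standard_normal(4096)
y = S @ x_true                                # simulated measurement vector

x_tik = tikhonov_operator(S, lam=1e-2) @ y    # Tikhonov pseudoinverse image
x_lbp = linear_back_projection(S, y)          # back-projection image
```

Because both methods are linear, the reconstruction operator can be precomputed once and reused for every measurement frame, which is what makes them natural one-step references for the learned reconstructions.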
<option value="review-article">Review</option> <option value="rapid-communication">Communication</option> <option value="editorial">Editorial</option> <option value="abstract">Abstract</option> <option value="book-review">Book Review</option> <option value="brief-communication">Brief Communication</option> <option value="brief-report">Brief Report</option> <option value="case-report">Case Report</option> <option value="clinicopathological-challenge">Clinicopathological Challenge</option> <option value="article-commentary">Comment</option> <option value="commentary">Commentary</option> <option value="concept-paper">Concept Paper</option> <option value="conference-report">Conference Report</option> <option value="correction">Correction</option> <option value="creative">Creative</option> <option value="data-descriptor">Data Descriptor</option> <option value="discussion">Discussion</option> <option value="Entry">Entry</option> <option value="essay">Essay</option> <option value="expression-of-concern">Expression of Concern</option> <option value="extended-abstract">Extended Abstract</option> <option value="field-guide">Field Guide</option> <option value="guidelines">Guidelines</option> <option value="hypothesis">Hypothesis</option> <option value="interesting-image">Interesting Images</option> <option value="letter">Letter</option> <option value="books-received">New Book Received</option> <option value="obituary">Obituary</option> <option value="opinion">Opinion</option> <option value="perspective">Perspective</option> <option value="proceedings">Proceeding Paper</option> <option value="project-report">Project Report</option> <option value="protocol">Protocol</option> <option value="registered-report">Registered Report</option> <option value="reply">Reply</option> <option value="retraction">Retraction</option> <option value="note">Short Note</option> <option value="study-protocol">Study Protocol</option> <option value="systematic_review">Systematic Review</option> <option value="technical-note">Technical Note</option> <option value="tutorial">Tutorial</option> <option value="viewpoint">Viewpoint</option> </select> </div> <div class="large-1 medium-1 small-6 end columns small-push-6 medium-reset-order large-reset-order js-search-collapsed-button-container"> <div class="search-input-label">&nbsp;</div> <input type="submit" id="search" value="Search" class="button button--dark button--full-width searchButton1 US_SearchButton" tabindex="12"> </div> <div class="large-1 medium-1 small-6 end columns large-text-left small-only-text-center small-pull-6 medium-reset-order large-reset-order js-search-collapsed-link-container"> <div class="search-input-label">&nbsp;</div> <a class="main-search-clear search-container__link" href="#" onclick="openAdvanced(''); return false;">Advanced<span class="show-for-small-only"> Search</span></a> </div> </div> </div> <div class="search-container__advanced" style="margin-top: 0; padding-top: 0px; background-color: inherit; color: inherit;"> <div class="row"> <div class="large-2 medium-2 columns show-for-medium-up">&nbsp;</div> <div class="large-2 medium-2 small-6 columns "> <div class=""> <div class="search-input-label">Section</div> </div> <select id="section" tabindex="5" name="section" class="chosen-select"> <option value=""></option> </select> </div> <div class="large-2 medium-2 small-6 columns "> <div class=""> <div class="search-input-label">Special Issue</div> </div> <select id="special_issue" tabindex="6" name="special_issue" class="chosen-select"> <option 
value=""></option> </select> </div> <div class="large-1 medium-1 small-6 end columns "> <div class="search-input-label">Volume</div> <input type="text" id="volume" tabindex="7" name="volume" placeholder="..." value="23" /> </div> <div class="large-1 medium-1 small-6 end columns "> <div class="search-input-label">Issue</div> <input type="text" id="issue" tabindex="8" name="issue" placeholder="..." value="18" /> </div> <div class="large-1 medium-1 small-6 end columns "> <div class="search-input-label">Number</div> <input type="text" id="number" tabindex="9" name="number" placeholder="..." value="" /> </div> <div class="large-1 medium-1 small-6 end columns "> <div class="search-input-label">Page</div> <input type="text" id="page" tabindex="10" name="page" placeholder="..." value="" /> </div> <div class="large-1 medium-1 small-6 columns small-push-6 medium-reset order large-reset-order medium-reset-order js-search-expanded-button-container"></div> <div class="large-1 medium-1 small-6 columns large-text-left small-only-text-center small-pull-6 medium-reset-order large-reset-order js-search-expanded-link-container"></div> </div> </div> </form> <form id="advanced-search" class="large-12 medium-12 columns"> <div class="search-container__advanced"> <div id="advanced-search-template" class="row advanced-search-row"> <div class="large-2 medium-2 small-12 columns show-for-medium-up">&nbsp;</div> <div class="large-2 medium-2 small-3 columns connector-div"> <div class="search-input-label"><span class="show-for-medium-up">Logical Operator</span><span class="show-for-small">Operator</span></div> <select class="connector"> <option value="and">AND</option> <option value="or">OR</option> </select> </div> <div class="large-3 medium-3 small-6 columns search-text-div"> <div class="search-input-label">Search Text</div> <input type="text" class="search-text" placeholder="Search text"> </div> <div class="large-2 medium-2 small-6 large-offset-0 medium-offset-0 small-offset-3 columns search-field-div"> <div class="search-input-label">Search Type</div> <select class="search-field"> <option value="all">All fields</option> <option value="title">Title</option> <option value="abstract">Abstract</option> <option value="keywords">Keywords</option> <option value="authors">Authors</option> <option value="affiliations">Affiliations</option> <option value="doi">Doi</option> <option value="full_text">Full Text</option> <option value="references">References</option> </select> </div> <div class="large-1 medium-1 small-3 columns"> <div class="search-input-label">&nbsp;</div> <div class="search-action-div"> <div class="search-plus"> <i class="material-icons">add_circle_outline</i> </div> </div> <div class="search-action-div"> <div class="search-minus"> <i class="material-icons">remove_circle_outline</i> </div> </div> </div> <div class="large-1 medium-1 small-6 large-offset-0 medium-offset-0 small-offset-3 end columns"> <div class="search-input-label">&nbsp;</div> <input class="advanced-search-button button button--dark search-submit" type="submit" value="Search"> </div> <div class="large-1 medium-1 small-6 end columns show-for-medium-up"></div> </div> </div> </form> </div> <div class="header-divider">&nbsp;</div> <div class="breadcrumb row full-row"> <div class="breadcrumb__element"> <a href="/about/journals">Journals</a> </div> <div class="breadcrumb__element"> <a href="/journal/sensors">Sensors</a> </div> <div class="breadcrumb__element"> <a href="/1424-8220/23">Volume 23</a> </div> <div class="breadcrumb__element"> <a 
href="/1424-8220/23/18">Issue 18</a> </div> <div class="breadcrumb__element"> <a href="#">10.3390/s23187774</a> </div> </div> </header> <div id="main-content" class=""> <div class="row full-width row-fixed-left-column"> <div id="left-column" class="content__column large-3 medium-3 small-12 columns"> <div class="content__container"> <a href="/journal/sensors"> <img src="https://pub.mdpi-res.com/img/journals/sensors-logo.png?8600e93ff98dbf14" alt="sensors-logo" title="Sensors" style="max-height: 60px; margin: 0 0 0 0;"> </a> <div class="generic-item no-border"> <a class="button button--color button--full-width js-journal-active-only-link js-journal-active-only-submit-link UC_ArticleSubmitButton" href="https://susy.mdpi.com/user/manuscripts/upload?form%5Bjournal_id%5D%3D3" data-disabledmessage="creating new submissions is not possible."> Submit to this Journal </a> <a class="button button--color button--full-width js-journal-active-only-link UC_ArticleReviewButton" href="https://susy.mdpi.com/volunteer/journals/review" data-disabledmessage="volunteering as journal reviewer is not possible."> Review for this Journal </a> <a class="button button--color-inversed button--color-journal button--full-width js-journal-active-only-link UC_ArticleEditIssueButton" href="/journalproposal/sendproposalspecialissue/sensors" data-path="/1424-8220/23/18/7774" data-disabledmessage="proposing new special issue is not possible."> Propose a Special Issue </a> </div> <div class="generic-item link-article-menu show-for-small"> <a href="#" class="link-article-menu show-for-small"> <span class="closed">&#9658;</span> <span class="open" style="display: none;">&#9660;</span> Article Menu </a> </div> <div class="hide-small-down-initially UI_ArticleMenu"> <div class="generic-item"> <h2>Article Menu</h2> </div> <ul class="accordion accordion__menu" data-accordion data-options="multi_expand:true;toggleable: true"> <li class="accordion-navigation"> <a href="#academic_editors" class="accordion__title">Academic Editors</a> <div id="academic_editors" class="content active"> <div class="academic-editor-container " title="Fraunhofer Centre for Applied Photonics, Glasgow G1 1RD, UK"> <div class="sciprofiles-link" style="display: inline-block"> <div class="sciprofiles-link__link"> <img class="sciprofiles-link__image" src="https://pub.mdpi-res.com/bundles/mdpisciprofileslink/img/unknown-user.png?1732286508" style="width: auto; height: 16px; border-radius: 50%;"> <span class="sciprofiles-link__name" style="line-height: 36px;">Dong Xiao</span> </div> </div> </div> <div class="academic-editor-container " title="Key Laboratory of Ultra-Fast Photoelectric Diagnostics Technology, Xi&#039;an Institute of Optics and Precision Mechanics, Xi&#039;an 710119, China"> <div class="sciprofiles-link" style="display: inline-block"> <div class="sciprofiles-link__link"> <img class="sciprofiles-link__image" src="https://pub.mdpi-res.com/bundles/mdpisciprofileslink/img/unknown-user.png?1732286508" style="width: auto; height: 16px; border-radius: 50%;"> <span class="sciprofiles-link__name" style="line-height: 36px;">Yahui Li</span> </div> </div> </div> </div> </li> <li class="accordion-direct-link"> <a href="/1424-8220/23/18/7774/scifeed_display" data-reveal-id="scifeed-modal" data-reveal-ajax="true">Subscribe SciFeed</a> </li> <li class="accordion-direct-link js-article-similarity-container" style="display: none"> <a href="#" class="js-similarity-related-articles">Recommended Articles</a> </li> <li class="accordion-navigation"> <a href="#related" 
class="accordion__title">Related Info Links</a> <div id="related" class="content UI_ArticleMenu_RelatedLinks"> <ul> <li class="li-link"> <a href="http://www.ncbi.nlm.nih.gov/sites/entrez/37765831" target="_blank" rel="noopener noreferrer">PubMed/Medline</a> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Image%20Reconstruction%20Using%20Supervised%20Learning%20in%20Wearable%20Electrical%20Impedance%20Tomography%20of%20the%20Thorax" target="_blank" rel="noopener noreferrer">Google Scholar</a> </li> </ul> </div> </li> <li class="accordion-navigation"> <a href="#authors" class="accordion__title">More by Authors Links</a> <div id="authors" class="content UI_ArticleMenu_AuthorsLinks"> <ul class="side-menu-ul"> <li> <a class="expand" onclick='$(this).closest("li").next("div").toggle(); return false;'>on DOAJ</a> </li> <div id="AuthorDOAJExpand" style="display:none;"> <ul class="submenu"> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Mikhail%20Ivanenko%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Ivanenko, M.</a> <li> </li> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Waldemar%20T.%20Smolik%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Smolik, W. T.</a> <li> </li> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Damian%20Wanta%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Wanta, D.</a> <li> </li> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Mateusz%20Midura%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Midura, M.</a> <li> </li> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Przemys%C5%82aw%20Wr%C3%B3blewski%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Wróblewski, P.</a> <li> </li> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Xiaohan%20Hou%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Hou, X.</a> <li> </li> <li class="li-link"> <a href='http://doaj.org/search/articles?source=%7B%22query%22%3A%7B%22query_string%22%3A%7B%22query%22%3A%22%5C%22Xiaoheng%20Yan%5C%22%22%2C%22default_operator%22%3A%22AND%22%2C%22default_field%22%3A%22bibjson.author.name%22%7D%7D%7D' target="_blank" rel="noopener noreferrer">Yan, X.</a> <li> </li> </ul> </div> <li> <a class="expand" onclick='$(this).closest("li").next("div").toggle(); return false;'>on Google Scholar</a> </li> <div id="AuthorGoogleExpand" style="display:none;"> <ul class="submenu"> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Mikhail%20Ivanenko" target="_blank" rel="noopener 
noreferrer">Ivanenko, M.</a> <li> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Waldemar%20T.%20Smolik" target="_blank" rel="noopener noreferrer">Smolik, W. T.</a> <li> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Damian%20Wanta" target="_blank" rel="noopener noreferrer">Wanta, D.</a> <li> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Mateusz%20Midura" target="_blank" rel="noopener noreferrer">Midura, M.</a> <li> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Przemys%C5%82aw%20Wr%C3%B3blewski" target="_blank" rel="noopener noreferrer">Wróblewski, P.</a> <li> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Xiaohan%20Hou" target="_blank" rel="noopener noreferrer">Hou, X.</a> <li> </li> <li class="li-link"> <a href="https://scholar.google.com/scholar?q=Xiaoheng%20Yan" target="_blank" rel="noopener noreferrer">Yan, X.</a> <li> </li> </ul> </div> <li> <a class="expand" onclick='$(this).closest("li").next("div").toggle(); return false;'>on PubMed</a> </li> <div id="AuthorPubMedExpand" style="display:none;"> <ul class="submenu"> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Mikhail%20Ivanenko" target="_blank" rel="noopener noreferrer">Ivanenko, M.</a> <li> </li> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Waldemar%20T.%20Smolik" target="_blank" rel="noopener noreferrer">Smolik, W. T.</a> <li> </li> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Damian%20Wanta" target="_blank" rel="noopener noreferrer">Wanta, D.</a> <li> </li> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Mateusz%20Midura" target="_blank" rel="noopener noreferrer">Midura, M.</a> <li> </li> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Przemys%C5%82aw%20Wr%C3%B3blewski" target="_blank" rel="noopener noreferrer">Wróblewski, P.</a> <li> </li> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Xiaohan%20Hou" target="_blank" rel="noopener noreferrer">Hou, X.</a> <li> </li> <li class="li-link"> <a href="http://www.pubmed.gov/?cmd=Search&amp;term=Xiaoheng%20Yan" target="_blank" rel="noopener noreferrer">Yan, X.</a> <li> </li> </ul> </div> </ul> </div> </li> </ul> <span style="display:none" id="scifeed_hidden_flag"></span> <span style="display:none" id="scifeed_subscribe_url">/ajax/scifeed/subscribe</span> </div> </div> <div class="content__container responsive-moving-container large medium active hidden" data-id="article-counters"> <div id="counts-wrapper" class="row generic-item no-border" data-equalizer> <div id="js-counts-wrapper__views" class="small-12 hide columns count-div-container"> <a href="#metrics" > <div class="count-div" data-equalizer-watch> <span class="name">Article Views</span> <span class="count view-number"></span> </div> </a> </div> <div id="js-counts-wrapper__citations" class="small-12 columns hide count-div-container"> <a href="#metrics" > <div class="count-div" data-equalizer-watch> <span class="name">Citations</span> <span class="count citations-number Var_ArticleMaxCitations">-</span> </div> </a> </div> </div> </div> <div class="content__container"> <div class="hide-small-down-initially"> <ul class="accordion accordion__menu" data-accordion data-options="multi_expand:true;toggleable: true"> <li class="accordion-navigation"> <a href="#table_of_contents" class="accordion__title">Table of Contents</a> <div id="table_of_contents" 
class="content active"> <div class="menu-caption" id="html-quick-links-title"></div> </div> </li> </ul> </div> </div> <!-- PubGrade code --> <div id="pbgrd-sky"></div> <script src="https://cdn.pbgrd.com/core-mdpi.js"></script> <style>.content__container { min-width: 300px; }</style> <!-- PubGrade code --> </div> <div id="middle-column" class="content__column large-9 medium-9 small-12 columns end middle-bordered"> <div class="middle-column__help"> <div class="middle-column__help__fixed show-for-medium-up"> <span id="js-altmetrics-donut" href="#" target="_blank" rel="noopener noreferrer" style="display: none;"> <span data-badge-type='donut' class='altmetric-embed' data-doi='10.3390/s23187774'></span> <span>Altmetric</span> </span> <a href="#" class="UA_ShareButton" data-reveal-id="main-share-modal" title="Share"> <i class="material-icons">share</i> <span>Share</span> </a> <a href="#" data-reveal-id="main-help-modal" title="Help"> <i class="material-icons">announcement</i> <span>Help</span> </a> <a href="javascript:void(0);" data-reveal-id="cite-modal" data-counterslink = "https://www.mdpi.com/1424-8220/23/18/7774/cite" > <i class="material-icons">format_quote</i> <span>Cite</span> </a> <a href="https://sciprofiles.com/discussion-groups/public/10.3390/s23187774?utm_source=mpdi.com&utm_medium=publication&utm_campaign=discuss_in_sciprofiles" target="_blank" rel="noopener noreferrer" title="Discuss in Sciprofiles"> <i class="material-icons">question_answer</i> <span>Discuss in SciProfiles</span> </a> <a href="#" class="" data-hypothesis-trigger-endorses-tab title="Endorse"> <i data-hypothesis-endorse-trigger class="material-icons" >thumb_up</i> <div data-hypothesis-endorsement-count data-hypothesis-trigger-endorses-tab class="hypothesis-count-container"> ... </div> <span>Endorse</span> </a> <a href="#" data-hypothesis-trigger class="js-hypothesis-open UI_ArticleAnnotationsButton" title="Comment"> <i class="material-icons">textsms</i> <div data-hypothesis-annotation-count class="hypothesis-count-container"> ... </div> <span>Comment</span> </a> </div> <div id="main-help-modal" class="reveal-modal reveal-modal-new" data-reveal aria-labelledby="modalTitle" aria-hidden="true" role="dialog"> <div class="row"> <div class="small-12 columns"> <h2 style="margin: 0;">Need Help?</h2> </div> <div class="small-6 columns"> <h3>Support</h3> <p> Find support for a specific problem in the support section of our website. </p> <a target="_blank" href="/about/contactform" class="button button--color button--full-width"> Get Support </a> </div> <div class="small-6 columns"> <h3>Feedback</h3> <p> Please let us know what you think of our products and services. </p> <a target="_blank" href="/feedback/send" class="button button--color button--full-width"> Give Feedback </a> </div> <div class="small-6 columns end"> <h3>Information</h3> <p> Visit our dedicated information section to learn more about MDPI. 
</p> <a target="_blank" href="/authors" class="button button--color button--full-width"> Get Information </a> </div> </div> <a class="close-reveal-modal" aria-label="Close"> <i class="material-icons">clear</i> </a> </div> </div> <div class="middle-column__main "> <div class="page-highlight"> <style type="text/css"> img.review-status { width: 30px; } </style> <div id="jmolModal" class="reveal-modal" data-reveal aria-labelledby="Captcha" aria-hidden="true" role="dialog"> <h2>JSmol Viewer</h2> <div class="row"> <div class="small-12 columns text-center"> <iframe style="width: 520px; height: 520px;" frameborder="0" id="jsmol-content"></iframe> <div class="content"></div> </div> </div> <a class="close-reveal-modal" aria-label="Close"> <i class="material-icons">clear</i> </a> </div> <div itemscope itemtype="http://schema.org/ScholarlyArticle" id="abstract" class="abstract_div"> <div class="js-check-update-container"></div> <div class="html-content__container content__container content__container__combined-for-large__first" style="overflow: auto; position: inherit;"> <div class='html-profile-nav'> <div class='top-bar'> <div class='nav-sidebar-btn show-for-large-up' data-status='opened' > <i class='material-icons'>first_page</i> </div> <a id="js-button-download" class="button button--color-inversed" style="display: none;" href="/1424-8220/23/18/7774/pdf?version=1694251005" data-name="Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax" data-journal="sensors"> <i class="material-icons custom-download"></i> Download PDF </a> <div class='nav-btn'> <i class='material-icons'>settings</i> </div> <a href="/1424-8220/23/18/7774/reprints" id="js-button-reprints" class="button button--color-inversed"> Order Article Reprints </a> </div> <div class='html-article-menu'> <div class='html-first-step row'> <div class='html-font-family large-6 medium-6 small-12 columns'> <div class='row'> <div class='html-font-label large-4 medium-4 small-12 columns'> Font Type: </div> <div class='large-8 medium-8 small-12 columns'> <span class="html-article-menu-option"><i style='font-family:Arial, Arial, Helvetica, sans-serif;' data-fontfamily='Arial, Arial, Helvetica, sans-serif'>Arial</i></span> <span class="html-article-menu-option"><i style='font-family:Georgia1, Georgia, serif;' data-fontfamily='Georgia1, Georgia, serif'>Georgia</i></span> <span class="html-article-menu-option"><i style='font-family:Verdana, Verdana, Geneva, sans-serif;' data-fontfamily='Verdana, Verdana, Geneva, sans-serif' >Verdana</i></span> </div> </div> </div> <div class='html-font-resize large-6 medium-6 small-12 columns'> <div class='row'> <div class='html-font-label large-4 medium-4 small-12 columns'>Font Size:</div> <div class='large-8 medium-8 small-12 columns'> <span class="html-article-menu-option a1" data-percent="100">Aa</span> <span class="html-article-menu-option a2" data-percent="120">Aa</span> <span class="html-article-menu-option a3" data-percent="160">Aa</span> </div> </div> </div> </div> <div class='row'> <div class='html-line-space large-6 medium-6 small-12 columns'> <div class='row'> <div class='html-font-label large-4 medium-4 small-12 columns' >Line Spacing:</div> <div class='large-8 medium-8 small-12 columns'> <span class="html-article-menu-option a1" data-line-height="1.5em"> <i class="fa">&#xf034;</i> </span> <span class="html-article-menu-option a2" data-line-height="1.8em"> <i class="fa">&#xf034;</i> </span> <span class="html-article-menu-option a3" data-line-height="2.1em"> 
<i class="fa">&#xf034;</i> </span> </div> </div> </div> <div class='html-column-width large-6 medium-6 small-12 columns'> <div class='row'> <div class='html-font-label large-4 medium-4 small-12 columns' >Column Width:</div> <div class='large-8 medium-8 small-12 columns'> <span class="html-article-menu-option a1" data-column-width="20%"> <i class="fa">&#xf035;</i> </span> <span class="html-article-menu-option a2" data-column-width="10%"> <i class="fa">&#xf035;</i> </span> <span class="html-article-menu-option a3" data-column-width="0%"> <i class="fa">&#xf035;</i> </span> </div> </div> </div> </div> <div class='row'> <div class='html-font-bg large-6 medium-6 small-12 columns end'> <div class='row'> <div class='html-font-label large-4 medium-4 small-12 columns'>Background:</div> <div class='large-8 medium-8 small-12 columns'> <div class="html-article-menu-option html-nav-bg html-nav-bright" data-bg="bright"> <i class="fa fa-file-text"></i> </div> <div class="html-article-menu-option html-nav-bg html-nav-dark" data-bg="dark"> <i class="fa fa-file-text-o"></i> </div> <div class="html-article-menu-option html-nav-bg html-nav-creme" data-bg="creme"> <i class="fa fa-file-text"></i> </div> </div> </div> </div> </div> </div> </div> <article ><div class='html-article-content'> <span itemprop="publisher" content="Multidisciplinary Digital Publishing Institute"></span><span itemprop="url" content="https://www.mdpi.com/1424-8220/23/18/7774"></span> <div class="article-icons"><span class="label openaccess" data-dropdown="drop-article-label-openaccess" aria-expanded="false">Open Access</span><span class="label articletype">Article</span></div> <h1 class="title hypothesis_container" itemprop="name"> Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax </h1> <div class="art-authors hypothesis_container"> by <span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727823' data-options='is_hover:true, hover_timeout:5000'> Mikhail Ivanenko</div><div id="profile-card-drop11727823" data-dropdown-content class="f-dropdown content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/bundles/mdpisciprofileslink/img/unknown-user.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Mikhail Ivanenko</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/3095551?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Mikhail%20Ivanenko" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Mikhail%20Ivanenko&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Mikhail%20Ivanenko" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 1</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727823" 
href="/cdn-cgi/l/email-protection#eac589848ec7898d83c586c58f878b8386c79a98859e8f899e838584c9dadadadedc8edadcdadfda89dadedadbded9dadedb88da89dad9dad2dad9dadcdad8ded9db8fdbd3dbd2dad3d88edb8edb8bded9dad2dad3dbd2ded9db8edadb"><sup><i class="fa fa-envelope-o"></i></sup></a><a href="https://orcid.org/0009-0006-8682-2751" target="_blank" rel="noopener noreferrer"><img src="https://pub.mdpi-res.com/img/design/orcid.png?0465bc3812adeb52?1732286508" title="ORCID" style="position: relative; width: 13px; margin-left: 3px; max-width: 13px !important; height: auto; top: -5px;"></a>, </span><span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727824' data-options='is_hover:true, hover_timeout:5000'> Waldemar T. Smolik</div><div id="profile-card-drop11727824" data-dropdown-content class="f-dropdown content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/profiles/1728452/thumb/Waldemar_Tomasz_Smolik.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Waldemar T. Smolik</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/1728452?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Waldemar%20T.%20Smolik" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Waldemar%20T.%20Smolik&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Waldemar%20T.%20Smolik" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 1,*</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727824" href="/cdn-cgi/l/email-protection#240b474a400947434d0b480b4149454d480954564b504147504d4b4a07141415121313154615171516154515121411111d14101545151c154615411547171314131414111d151615171416111d14131546"><sup><i class="fa fa-envelope-o"></i></sup></a><a href="https://orcid.org/0000-0002-1524-5049" target="_blank" rel="noopener noreferrer"><img src="https://pub.mdpi-res.com/img/design/orcid.png?0465bc3812adeb52?1732286508" title="ORCID" style="position: relative; width: 13px; margin-left: 3px; max-width: 13px !important; height: auto; top: -5px;"></a>, </span><span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727825' data-options='is_hover:true, hover_timeout:5000'> Damian Wanta</div><div id="profile-card-drop11727825" data-dropdown-content class="f-dropdown content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/bundles/mdpisciprofileslink/img/unknown-user.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Damian Wanta</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/2003446?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button 
button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Damian%20Wanta" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Damian%20Wanta&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Damian%20Wanta" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 1</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727825" href="/cdn-cgi/l/email-protection#b09fd3ded49dd3d7d99fdc9fd5ddd1d9dc9dc0c2dfc4d5d3c4d9dfde93808080858684808980d4808580d184d18183808580d18180808582848184818384d180818080818184d181848088"><sup><i class="fa fa-envelope-o"></i></sup></a><a href="https://orcid.org/0000-0002-1596-6524" target="_blank" rel="noopener noreferrer"><img src="https://pub.mdpi-res.com/img/design/orcid.png?0465bc3812adeb52?1732286508" title="ORCID" style="position: relative; width: 13px; margin-left: 3px; max-width: 13px !important; height: auto; top: -5px;"></a>, </span><span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727826' data-options='is_hover:true, hover_timeout:5000'> Mateusz Midura</div><div id="profile-card-drop11727826" data-dropdown-content class="f-dropdown content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/bundles/mdpisciprofileslink/img/unknown-user.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Mateusz Midura</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/2527145?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Mateusz%20Midura" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Mateusz%20Midura&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Mateusz%20Midura" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 1</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727826" href="/cdn-cgi/l/email-protection#5f703c313b723c38367033703a323e3633722f2d302b3a3c2b3630317c6f6f6f3c693b6e666f676e676e3a6e686b6c6f6f6f6b6f666e676e396f3c6b6c6f666f6d6f696e666d3b6e3b6e3e6b6c6f676f666e676b6c6e3b6f6e"><sup><i class="fa fa-envelope-o"></i></sup></a><a href="https://orcid.org/0000-0002-2449-0652" target="_blank" rel="noopener noreferrer"><img src="https://pub.mdpi-res.com/img/design/orcid.png?0465bc3812adeb52?1732286508" title="ORCID" style="position: relative; width: 13px; margin-left: 3px; max-width: 13px !important; height: auto; top: -5px;"></a>, </span><span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727827' data-options='is_hover:true, hover_timeout:5000'> Przemysław Wróblewski</div><div id="profile-card-drop11727827" data-dropdown-content class="f-dropdown 
content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/bundles/mdpisciprofileslink/img/unknown-user.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Przemysław Wróblewski</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/3109616?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Przemys%C5%82aw%20Wr%C3%B3blewski" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Przemys%C5%82aw%20Wr%C3%B3blewski&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Przemys%C5%82aw%20Wr%C3%B3blewski" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 1</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727827" href="/cdn-cgi/l/email-protection#8ba4e8e5efa6e8ece2a4e7a4eee6eae2e7a6fbf9e4ffeee8ffe2e4e5a8bbbbbbb9bcbbbbeababebaefbbb2bbb8bae8bababbbcbeeebbbcbbb9baedbab9bae8babebbbcbbb8bae9bab2b8bbbbbbbbbcbeeebabebabfbbbebeeebbbbbae8"><sup><i class="fa fa-envelope-o"></i></sup></a><a href="https://orcid.org/0000-0002-6713-9088" target="_blank" rel="noopener noreferrer"><img src="https://pub.mdpi-res.com/img/design/orcid.png?0465bc3812adeb52?1732286508" title="ORCID" style="position: relative; width: 13px; margin-left: 3px; max-width: 13px !important; height: auto; top: -5px;"></a>, </span><span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727828' data-options='is_hover:true, hover_timeout:5000'> Xiaohan Hou</div><div id="profile-card-drop11727828" data-dropdown-content class="f-dropdown content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/bundles/mdpisciprofileslink/img/unknown-user.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Xiaohan Hou</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/3086907?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Xiaohan%20Hou" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Xiaohan%20Hou&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Xiaohan%20Hou" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 2</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727828" 
href="/cdn-cgi/l/email-protection#68470b060c450b0f014704470d0509010445181a071c0d0b1c0107064b585859585e5058585d095d505d095d095858595858585a505d515d095d0d5c5e580a585f585d"><sup><i class="fa fa-envelope-o"></i></sup></a> and </span><span class="inlineblock "><div class='profile-card-drop' data-dropdown='profile-card-drop11727829' data-options='is_hover:true, hover_timeout:5000'> Xiaoheng Yan</div><div id="profile-card-drop11727829" data-dropdown-content class="f-dropdown content profile-card-content" aria-hidden="true" tabindex="-1"><div class="profile-card__title"><div class="sciprofiles-link" style="display: inline-block"><div class="sciprofiles-link__link"><img class="sciprofiles-link__image" src="/bundles/mdpisciprofileslink/img/unknown-user.png" style="width: auto; height: 16px; border-radius: 50%;"><span class="sciprofiles-link__name">Xiaoheng Yan</span></div></div></div><div class="profile-card__buttons" style="margin-bottom: 10px;"><a href="https://sciprofiles.com/profile/1201140?utm_source=mdpi.com&amp;utm_medium=website&amp;utm_campaign=avatar_name" class="button button--color-inversed" target="_blank"> SciProfiles </a><a href="https://scilit.net/scholars?q=Xiaoheng%20Yan" class="button button--color-inversed" target="_blank"> Scilit </a><a href="https://www.preprints.org/search?search1=Xiaoheng%20Yan&field1=authors" class="button button--color-inversed" target="_blank"> Preprints.org </a><a href="https://scholar.google.com/scholar?q=Xiaoheng%20Yan" class="button button--color-inversed" target="_blank" rels="noopener noreferrer"> Google Scholar </a></div></div><sup> 2</sup><span style="display: inline; margin-left: 5px;"></span><a class="toEncode emailCaptcha visibility-hidden" data-author-id="11727829" href="/cdn-cgi/l/email-protection#e3cc808d87ce80848acc8fcc868e828a8fce93918c978680978a8c8dc0d3d3d2d2d4dbd2dad2d4d2d3d287d2d5d285d3d2d2dad2d5d0dbd7dad786d781d6d5d281d2d4d2d6"><sup><i class="fa fa-envelope-o"></i></sup></a><a href="https://orcid.org/0000-0002-9048-9035" target="_blank" rel="noopener noreferrer"><img src="https://pub.mdpi-res.com/img/design/orcid.png?0465bc3812adeb52?1732286508" title="ORCID" style="position: relative; width: 13px; margin-left: 3px; max-width: 13px !important; height: auto; top: -5px;"></a></span> </div> <div class="nrm"></div> <span style="display:block; height:6px;"></span> <div></div> <div style="margin: 5px 0 15px 0;" class="hypothesis_container"> <div class="art-affiliations"> <div class="affiliation "> <div class="affiliation-item"><sup>1</sup></div> <div class="affiliation-name ">Faculty of Electronics and Information Technology, Warsaw University of Technology, Nowowiejska 15/19, 00-665 Warsaw, Poland</div> </div> <div class="affiliation "> <div class="affiliation-item"><sup>2</sup></div> <div class="affiliation-name ">Faculty of Electrical and Control Engineering, Liaoning Technical University, No. 188 Longwan Street, Huludao 125105, China</div> </div> <div class="affiliation"> <div class="affiliation-item"><sup>*</sup></div> <div class="affiliation-name ">Author to whom correspondence should be addressed. 
Sensors 2023, 23(18), 7774; https://doi.org/10.3390/s23187774

Submission received: 2 August 2023 / Revised: 5 September 2023 / Accepted: 6 September 2023 / Published: 9 September 2023

(This article belongs to the Special Issue "Advanced Deep Learning for Biomedical Sensing and Imaging")

Academic Editors: Dong Xiao and Yahui Li

Figure captions:
Figure 1. Model of a transversal slice of a human thorax with lungs, heart, aorta (A), and spine (S) (a). Model of both lungs regionally affected by pneumothorax (b), pleural effusion (c), and hydropneumothorax (d). Pneumothorax and pleural effusion regions are shown, respectively, in blue and red.
Figure 2. Dataset samples: capacitance measurements (log scale), normalized capacitance measurements (linear scale), and corresponding conductivity distribution.
Figure 3. FCNN architecture: Gemm—linear layers; Relu—ReLU activation.
Figure 4. cGAN training scheme.
Figure 5. cGAN generator architecture: K—convolutional block, DK—deconvolutional block, CV—convolutional layer, L—linear layer.
Figure 6. cGAN discriminator architecture: K—convolutional block, CV—convolutional layer, L—linear layer.
Figure 7. cGAN generator blocks: (a) convolutional (K) and (b) deconvolutional (DK).
Figure 8. ANN classifier architecture.
Figure 9. Conductivity in a thorax slice: (a) healthy lungs; (b) both lungs affected by pneumothorax; (c) both lungs affected by pleural effusion; (d) lungs affected by hydropneumothorax; (e) random ellipses in the thorax. From left to right: ground truth image; image reconstructed by FCNN, cGAN, LBP, and TPINV.
Figure 10. Conductivity in a thorax slice reconstructed by FCNN for different levels of noise added to the measurements in the training and testing datasets.
Figure 11. Conductivity in a thorax slice reconstructed by cGAN for different levels of noise added to the measurements in the training and testing datasets.
Figure 12. Distribution of the image quality norm for the elements of the testing dataset: (a) RMSE, (b) PSNR, (c) SSIM, and (d) 2D correlation. Image reconstruction methods: FCNN (red dashed line), cGAN (blue solid line), LBP (black dotted line), pseudoinverse with Tikhonov regularization (TPINV) (magenta dashed-dotted line).
Figure 13. "One-versus-rest" integral ROC curves representing the classifier performance: (a) training and testing without noise, (b) training and testing with noise added.

Abstract: Electrical impedance tomography (EIT) is a non-invasive technique for visualizing the internal structure of the human body. Capacitively coupled electrical impedance tomography (CCEIT) is a new contactless EIT technique that can potentially be used as a wearable device. Recent studies have shown that machine learning-based approaches are very promising for EIT image reconstruction. Most of these studies concern models containing up to 22 electrodes and focus on different artificial neural network architectures, from simple shallow networks to complex convolutional networks. However, the use of convolutional networks for image reconstruction with a higher number of electrodes requires further investigation. In this work, two artificial neural network architectures were used for CCEIT image reconstruction: a fully connected deep neural network (FCNN) and a conditional generative adversarial network (cGAN). The training dataset was generated by numerical simulation of a thorax phantom with healthy and illness-affected lungs. Three illnesses, pneumothorax, pleural effusion, and hydropneumothorax, were modeled using the electrical properties of the tissues. The thorax phantom included the heart, aorta, spine, and lungs. A sensor with 32 area electrodes was used in the numerical model. The ECTsim custom-designed toolbox for Matlab was used to solve the forward problem and simulate the measurements. The two artificial neural networks were trained with supervision for image reconstruction. Reconstruction quality was compared between these networks and one-step algebraic reconstruction methods, namely linear back projection (LBP) and pseudoinverse with Tikhonov regularization (TPINV). The evaluation was based on pixel-to-pixel metrics: root-mean-square error (RMSE), structural similarity index (SSIM), 2D correlation coefficient, and peak signal-to-noise ratio (PSNR). Additionally, the diagnostic value, measured by the ROC AUC metric, was used to assess image quality. The results showed that obtaining information about regional lung function (regions affected by pneumothorax or pleural effusion) is possible using image reconstruction based on supervised learning and deep neural networks in EIT. The results obtained using the cGAN are markedly better than those obtained using the fully connected network, especially for noisy measurement data. However, the estimation of diagnostic value showed that even the algebraic methods provide satisfactory results.
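The evaluation described in the abstract relies on pixel-to-pixel metrics (RMSE, PSNR, SSIM, and the 2D correlation coefficient). The following is a minimal Python sketch of how such a comparison can be computed for a pair of ground-truth and reconstructed conductivity images; it assumes the images are available as 2D NumPy arrays and uses scikit-image for SSIM and PSNR. It is an illustration under those assumptions, not the authors' evaluation code.

```python
# Minimal sketch of the pixel-to-pixel image-quality metrics named in the abstract
# (RMSE, PSNR, SSIM, 2D correlation). Assumes ground-truth and reconstructed
# conductivity maps are 2D NumPy arrays defined on the same pixel grid.
import numpy as np
from skimage.metrics import structural_similarity, peak_signal_noise_ratio

def image_quality_metrics(sigma_true: np.ndarray, sigma_rec: np.ndarray) -> dict:
    data_range = sigma_true.max() - sigma_true.min()  # dynamic range for PSNR/SSIM
    rmse = float(np.sqrt(np.mean((sigma_true - sigma_rec) ** 2)))
    psnr = float(peak_signal_noise_ratio(sigma_true, sigma_rec, data_range=data_range))
    ssim = float(structural_similarity(sigma_true, sigma_rec, data_range=data_range))
    # 2D correlation coefficient: Pearson correlation of the flattened images
    corr2d = float(np.corrcoef(sigma_true.ravel(), sigma_rec.ravel())[0, 1])
    return {"RMSE": rmse, "PSNR": psnr, "SSIM": ssim, "CORR2D": corr2d}

if __name__ == "__main__":
    truth = np.random.rand(64, 64)                  # placeholder ground-truth image
    recon = truth + 0.05 * np.random.randn(64, 64)  # placeholder reconstruction
    print(image_quality_metrics(truth, recon))
```

Metrics of this kind are typically averaged over the testing dataset, which is how the distributions in Figure 12 are reported.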
The ECTsim custom-designed toolbox for Matlab was used to solve the forward problem and to simulate the measurements. Two artificial neural networks were trained with supervision for image reconstruction. Reconstruction quality was compared between those networks and one-step algebraic reconstruction methods such as linear back projection and pseudoinverse with Tikhonov regularization. This evaluation was based on pixel-to-pixel metrics such as root-mean-square error, structural similarity index, 2D correlation coefficient, and peak signal-to-noise ratio. Additionally, the diagnostic value measured by the ROC AUC metric was used to assess the image quality. The results showed that obtaining information about regional lung function (regions affected by pneumothorax or pleural effusion) is possible using image reconstruction based on supervised learning and deep neural networks in EIT. The results obtained using cGAN are substantially better than those obtained using a fully connected network, especially in the case of noisy measurement data. However, diagnostic value estimation showed that even algebraic methods allow us to obtain satisfactory results.</div> </section> <div id="html-keywords"> <div class="html-gwd-group"><div id="html-keywords-title">Keywords: </div><a href="/search?q=capacitively+coupled+electrical+impedance+tomography">capacitively coupled electrical impedance tomography</a>; <a href="/search?q=image+reconstruction">image reconstruction</a>; <a href="/search?q=inverse+problem">inverse problem</a>; <a href="/search?q=deep+neural+networks">deep neural networks</a>; <a href="/search?q=deep+learning">deep learning</a>; <a href="/search?q=fully+connected+neural+networks">fully connected neural networks</a>; <a href="/search?q=cGAN">cGAN</a>; <a href="/search?q=medical+imaging">medical imaging</a>; <a href="/search?q=lung+imaging">lung imaging</a>; <a href="/search?q=pneumothorax">pneumothorax</a>; <a href="/search?q=pleural+effusion">pleural effusion</a></div> </div> </div> </p> </div> </section> </div> <div class="hypothesis_container"> <div class="html-body"> <section id='sec1-sensors-23-07774' type='intro'><h2 data-nested='1'> 1. Introduction</h2><div class='html-p'>Electrical impedance tomography (EIT) is a low-cost and non-invasive technique for visualizing the internal structure of the human body [<a href="#B1-sensors-23-07774" class="html-bibr">1</a>,<a href="#B2-sensors-23-07774" class="html-bibr">2</a>,<a href="#B3-sensors-23-07774" class="html-bibr">3</a>]. The image of the conductivity distribution is reconstructed from measurements made using electrodes surrounding the body. The reconstruction of electric permittivity is possible when both impedance components are measured [<a href="#B4-sensors-23-07774" class="html-bibr">4</a>]. EIT relies on the different electrical properties of tissues and organs, and these properties can vary depending on tissue condition [<a href="#B5-sensors-23-07774" class="html-bibr">5</a>,<a href="#B6-sensors-23-07774" class="html-bibr">6</a>]. Different electrical properties in different image regions represent different tissues and organs, so healthy and disease-affected tissues can be localized in the image.</div><div class='html-p'>Image quality in EIT is limited by very poor spatial resolution and low sensitivity in the center of the field of view.
Despite this, EIT has shown its potential in medical diagnostics [<a href="#B7-sensors-23-07774" class="html-bibr">7</a>], especially in lung ventilation monitoring, which is possible due to the high impedance of lung tissue in comparison to other tissues and to a significant difference in impedance between the inspiration and expiration phases [<a href="#B8-sensors-23-07774" class="html-bibr">8</a>]. A few commercial EIT devices are currently available on the market, and some are in development [<a href="#B9-sensors-23-07774" class="html-bibr">9</a>]. In addition to lung ventilation evaluation, promising studies are devoted to lung perfusion evaluation [<a href="#B10-sensors-23-07774" class="html-bibr">10</a>]. Other emerging medical applications of EIT, still in the research phase, include brain imaging [<a href="#B11-sensors-23-07774" class="html-bibr">11</a>], cardiac volume monitoring [<a href="#B12-sensors-23-07774" class="html-bibr">12</a>], and gastric emptying assessment [<a href="#B13-sensors-23-07774" class="html-bibr">13</a>].</div><div class='html-p'>A barrier to the development of EIT is the problem of measuring very small signals. The very low signal-to-noise ratio makes it impossible to increase the number of electrodes. This leads to poor sampling of the signal at the edge of the field of view, which translates into a small number of measurements and poor spatial resolution of the image. In addition, a large difference in the measurement values for adjacent and opposite electrodes leads to poor numerical conditioning of the linear system matrix.</div><div class='html-p'>New approaches to measurement in EIT have been presented in the literature, for example, dual-frequency EIT, multiple-frequency EIT [<a href="#B14-sensors-23-07774" class="html-bibr">14</a>], and a new emerging technique named capacitively coupled electrical impedance tomography (CCEIT). The introduction of CCEIT makes it possible to overcome one of the most challenging issues of conventional EIT, namely variable electrode–skin impedance, which negatively affects measurement accuracy. Unlike EIT, where the measurement is four-electrode, CCEIT is a two-electrode measurement similar to electrical capacitance tomography (ECT). Capacitance measurement eliminates the negative effects of contact resistance, as it does not require direct contact between the electrodes and conductive tissue. This measurement method is less burdensome for the patient because a tomographic sensor may be constructed in the form of a wearable belt with electrodes. The sensor can also be embedded in clothing and used as a wearable tomographic sensor [<a href="#B15-sensors-23-07774" class="html-bibr">15</a>,<a href="#B16-sensors-23-07774" class="html-bibr">16</a>].</div><div class='html-p'>A major challenge in EIT is solving the inverse problem, which consists of determining the cross-section of the examined object on the basis of measurements made using electrodes placed around it. Ongoing research is conducted in various directions. In the analytical approach to the inverse problem, the D-Bar method, a generalization of the Calderon method, is of great interest [<a href="#B17-sensors-23-07774" class="html-bibr">17</a>,<a href="#B18-sensors-23-07774" class="html-bibr">18</a>].
The D-Bar method solves a non-linear problem, but like other algorithms, it requires regularization to reduce artifacts.</div><div class='html-p'>In the algebraic approach to tomographic reconstruction in EIT and CCEIT, the inverse problem is non-linear and severely ill-posed because the number of measurements is small relative to the number of pixels in the reconstructed image. The extremely poor numerical conditioning of the sensitivity matrix makes the inverse problem difficult to regularize [<a href="#B19-sensors-23-07774" class="html-bibr">19</a>,<a href="#B20-sensors-23-07774" class="html-bibr">20</a>]. Unfortunately, the known methods of automatic selection of the regularization parameter, the generalized cross-validation (GCV) method or the L-curve method, do not work well in the case of such poor numerical conditioning as in electrical tomography [<a href="#B21-sensors-23-07774" class="html-bibr">21</a>]. The regularization problem concerns both one-step methods and iterative methods such as the Landweber algorithm.</div><div class='html-p'>The best image reconstruction results are obtained by non-linear algorithms, such as the Levenberg–Marquardt (LM) algorithm, in which a non-linear objective function is minimized [<a href="#B22-sensors-23-07774" class="html-bibr">22</a>]. Non-linear algorithms are very computationally expensive because, in each step, a sensitivity matrix (the Jacobian matrix) is determined by numerically solving the differential equation for the electric field [<a href="#B23-sensors-23-07774" class="html-bibr">23</a>]. The use of algebraic reconstruction methods leads to a quality vs. speed trade-off: fast methods, such as linear back projection [<a href="#B24-sensors-23-07774" class="html-bibr">24</a>] or pseudoinverse, provide poor quality, while methods giving the best possible quality, such as the Levenberg–Marquardt [<a href="#B25-sensors-23-07774" class="html-bibr">25</a>] algorithm, are too slow for real-time reconstruction.</div><div class='html-p'>While the progress of classical reconstruction methods is slow, deep learning methods are developing dynamically. Over the past few years, machine learning [<a href="#B26-sensors-23-07774" class="html-bibr">26</a>], and deep learning (DL) in particular, have become a new trend in tomographic image reconstruction [<a href="#B27-sensors-23-07774" class="html-bibr">27</a>]. A general description of the inverse problem in tomographic imaging and an overview of various approaches to using neural networks to solve it can be found in [<a href="#B28-sensors-23-07774" class="html-bibr">28</a>]. The idea of image reconstruction using an artificial neural network (ANN) in EIT has been known for decades [<a href="#B29-sensors-23-07774" class="html-bibr">29</a>]. With the progressive increase in available computing power, interest in deep networks has grown [<a href="#B30-sensors-23-07774" class="html-bibr">30</a>]. The ability of deep neural networks to map complex non-linear functions makes them well suited to image reconstruction in electrical tomography. A comprehensive overview of deep learning applications in EIT can be found in [<a href="#B31-sensors-23-07774" class="html-bibr">31</a>].</div><div class='html-p'>There are three main machine learning approaches to solving inverse EIT problems: single network direct reconstruction, joint reconstruction, and hybrid reconstruction [<a href="#B32-sensors-23-07774" class="html-bibr">32</a>]. The single network approach is based on using either a linear or a convolutional ANN.
In this case, the network is trained directly on samples containing generated permittivity distributions and the corresponding calculated measurements. Joint reconstruction consists of two steps: preliminary reconstruction via solving the linear approximation of the inverse problem using a traditional numerical method such as Gauss–Newton, and then enhancing the result by applying an ANN to post-process the reconstructed image. This approach provides better stability and accuracy. An example of the hybrid approach is a reconstruction that relies on the combined use of a fully connected ANN and a U-Net-based ANN. A fully connected ANN is used to obtain the preliminary physical parameter distribution [<a href="#B33-sensors-23-07774" class="html-bibr">33</a>]. Then, the U-Net-based ANN is used to enhance the result of the initial reconstruction and obtain the final image.</div><div class='html-p'>Training an ANN to reconstruct images in EIT requires a large dataset containing pairs of capacitance measurements and the corresponding spatial distribution of electrical parameters. Gathering a set of patient medical records of this size is problematic. The numerical modeling of the chest phantom and the generation of measurements by solving the forward problem allow us to obtain a relatively large dataset in a reasonable time. During ANN training, it is important to artificially introduce noise into the simulated data, as this increases model stability and decreases the possibility of overfitting [<a href="#B34-sensors-23-07774" class="html-bibr">34</a>]. Overfitting is a well-known issue that causes a loss of the ANN’s generalization ability. Additionally, it is possible to prevent overfitting by increasing the diversity of the training dataset. The ANN-based approach requires a significant amount of computational resources (memory and time) only during the training phase. The response of trained networks is very fast compared to iterative algorithms.</div><div class='html-p'>Examples of the use of ANNs for electrical tomography image reconstruction can be found in many scientific papers, for example, in [<a href="#B35-sensors-23-07774" class="html-bibr">35</a>,<a href="#B36-sensors-23-07774" class="html-bibr">36</a>]. Different ANN architectures have been proposed for electrical tomography. These include feed-forward neural networks (FFNN) used in ECT [<a href="#B37-sensors-23-07774" class="html-bibr">37</a>], single-hidden layer feed-forward neural networks (SLFNs) used in EIT [<a href="#B35-sensors-23-07774" class="html-bibr">35</a>], Hopfield networks used in ECT [<a href="#B38-sensors-23-07774" class="html-bibr">38</a>], fully connected layers used in EIT [<a href="#B39-sensors-23-07774" class="html-bibr">39</a>,<a href="#B40-sensors-23-07774" class="html-bibr">40</a>], U-Net used both in ECT and EIT [<a href="#B33-sensors-23-07774" class="html-bibr">33</a>,<a href="#B41-sensors-23-07774" class="html-bibr">41</a>], a generative adversarial network used in both ECT [<a href="#B42-sensors-23-07774" class="html-bibr">42</a>] and EIT [<a href="#B43-sensors-23-07774" class="html-bibr">43</a>], and SegNet used in electrical resistance tomography (ERT) [<a href="#B44-sensors-23-07774" class="html-bibr">44</a>]. For capacitively coupled electrical resistance tomography, an approach based on convolutional neural networks (CNN) was proposed [<a href="#B45-sensors-23-07774" class="html-bibr">45</a>].
In [<a href="#B46-sensors-23-07774" class="html-bibr">46</a>], results obtained by the ANN are compared with well-validated algebraic solvers such as the LM algorithm. Encoder–decoder-based networks are widely used to solve the inverse problem [<a href="#B47-sensors-23-07774" class="html-bibr">47</a>], and the most prominent example of such a network is the U-Net architecture. It can be used as a standalone network with slight modifications [<a href="#B48-sensors-23-07774" class="html-bibr">48</a>,<a href="#B49-sensors-23-07774" class="html-bibr">49</a>,<a href="#B50-sensors-23-07774" class="html-bibr">50</a>] and also as part of a GAN training approach, for example, in SAR-CGAN [<a href="#B51-sensors-23-07774" class="html-bibr">51</a>]. Besides the U-Net-based approach, more complex encoder–decoder convolutional networks are used [<a href="#B52-sensors-23-07774" class="html-bibr">52</a>], which can also operate on a non-uniform mesh [<a href="#B53-sensors-23-07774" class="html-bibr">53</a>].</div><div class='html-p'>From the point of view of the clinical application of electrical impedance tomography, there is interest in anomaly detection in the lung region [<a href="#B54-sensors-23-07774" class="html-bibr">54</a>]. In the case of lung diagnostics, diseases such as pneumothorax and pleural effusion are particularly relevant. Recent studies have shown that these diseases cause observable changes in the measurement signal [<a href="#B55-sensors-23-07774" class="html-bibr">55</a>,<a href="#B56-sensors-23-07774" class="html-bibr">56</a>,<a href="#B57-sensors-23-07774" class="html-bibr">57</a>,<a href="#B58-sensors-23-07774" class="html-bibr">58</a>]. A large tissue conductivity difference from the normal state causes a significant change in the measured signal, negative in the case of pneumothorax and positive in the case of pleural effusion. A wearable sensor could be used by bedridden patients to monitor their lungs continuously.</div><section id='TheAimoftheWork' type=''><h4 class='html-italic' data-nested='2'> The Aim of the Work</h4><div class='html-p'>Considering the promising properties of CCEIT, the aforementioned potential of neural networks in solving the inverse problem, and the need to identify lung regions with pathological changes, we attempted to use deep neural networks to reconstruct thorax images from which areas affected by pathological changes could be identified. In this paper, we present a new enhanced chest phantom that takes into account the appearance of three pleural diseases (pneumothorax, pleural effusion, and hydropneumothorax) causing pathological accumulation of air or free fluid in the pleural cavity and leading to the collapse of the adjacent lung regions.</div><div class='html-p'>Detection of lung lobes affected by disease is described in [<a href="#B59-sensors-23-07774" class="html-bibr">59</a>]. However, the presented method yields only five average conductivity values for such regions, and it is based on the strong assumption that the geometry of the lobes is known.</div><div class='html-p'>We propose a modification of the thorax numerical phantom presented in [<a href="#B60-sensors-23-07774" class="html-bibr">60</a>], in which the lung displacement in the pleural cavity and shape deformation are modeled using elliptical regions. We assume individual differences in the shape and position of the organs in the patient’s chest.
The use of simple geometric elements in chest modeling arises from the need to build an appropriate physical phantom to carry out real measurements. The proposed numerical phantom is suitable for reuse by other researchers.</div><div class='html-p'>In our work, the image quality of a 32-electrode CCEIT was studied, whereas in other papers, smaller numbers of electrodes are used. For example, in the study [<a href="#B58-sensors-23-07774" class="html-bibr">58</a>] devoted to pleural disease discovery, a 22-electrode sensor was modeled. We want to show that the use of ANNs in the reconstruction of electrical tomography images using a sensor with 32 area electrodes allows us to obtain more detailed images with a 64 × 64 resolution.</div><div class='html-p'>The use of ANNs in electrical tomography image reconstruction is well known. However, in this paper, we use ANN-based reconstruction for the first time in the case of a lung model with regions affected by pleural diseases. This work is focused on exploring the required ANN complexity depending on the assumed level of noise. To achieve that goal, we have chosen a very simple shallow network and a sophisticated deep convolutional network: a fully connected network (FCNN) and a conditional generative adversarial network (cGAN), respectively. Pixel-based metrics are used to assess the results in most papers. In this work, we propose using the diagnostic value, measured with the area under the receiver operating characteristic (ROC) curve (AUC) metric, to evaluate image reconstruction quality.</div></section></section><section id='sec2-sensors-23-07774' type=''><h2 data-nested='1'> 2. Machine Learning Approach in CCEIT</h2><div class='html-p'>This work uses supervised learning, which requires a training dataset containing ordered input and output data pairs. In electrical tomography, a data pair consists of an image of a tomographic cross-section of the tested object and the electrode measurements for this object.</div><div class='html-p'>In CCEIT, the input data are the measurements of inter-electrode capacitances, and the output data are the images of the permittivity distribution in the examined space. In the case of patient thorax examinations, it is unrealistic to collect a large amount of training data in the early stages of the study. Due to the large size of the required training set, we decided to generate the data using numerical simulation.</div><div class='html-p'>The ECTsim software package, dedicated to the Matlab environment, was used to generate the training data [<a href="#B61-sensors-23-07774" class="html-bibr">61</a>]. The ECTsim package enables numerical modeling of electrical capacitance tomography in 2D and 3D geometry, but it can also be used to simulate capacitively coupled electrical impedance tomography [<a href="#B12-sensors-23-07774" class="html-bibr">12</a>,<a href="#B13-sensors-23-07774" class="html-bibr">13</a>]. In the ECTsim package, calculations are performed in the complex domain.
The medium is described by a complex permittivity:<div class='html-disp-formula-info' id='FD1-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>ε</mi> <mo>=</mo> <msup> <mi>ε</mi> <mo>′</mo> </msup> <mo>−</mo> <mi>j</mi> <mfrac> <mi>σ</mi> <mi>ω</mi> </mfrac> </mrow> </semantics></math> </div> <div class='l'> <label >(1)</label> </div> </div> where <math display='inline'><semantics> <msup> <mi>ε</mi> <mo>′</mo> </msup> </semantics></math> is permittivity, <math display='inline'><semantics> <mi>σ</mi> </semantics></math> is conductivity, and <math display='inline'><semantics> <mi>ω</mi> </semantics></math> is the angular frequency. The measurements are calculated using the finite volume method [<a href="#B62-sensors-23-07774" class="html-bibr">62</a>] from the Gauss law equation:<div class='html-disp-formula-info' id='FD2-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <msub> <mi>C</mi> <mrow> <mi>m</mi> <mrow> <mo>(</mo> <mrow> <mi>i</mi> <mo>,</mo> <mi>j</mi> </mrow> <mo>)</mo> </mrow> </mrow> </msub> <mo>=</mo> <mfrac> <mn>1</mn> <mrow> <msub> <mi>V</mi> <mi>i</mi> </msub> <mo>−</mo> <msub> <mi>V</mi> <mrow> <mi>j</mi> <mo>,</mo> <mo> </mo> <mi>j</mi> <mo>≠</mo> <mi>i</mi> </mrow> </msub> </mrow> </mfrac> <munderover> <mo>∯</mo> <mrow> <mo>∂</mo> <msub> <mi>Ω</mi> <mi>j</mi> </msub> </mrow> <mrow/> </munderover> <mi>ε</mi> <mrow> <mo>(</mo> <mi>r</mi> <mo>)</mo> </mrow> <mstyle mathvariant="bold" mathsize="normal"> <mi>E</mi> </mstyle> <mrow> <mo>(</mo> <mi>r</mi> <mo>)</mo> </mrow> <mi>d</mi> <mstyle mathvariant="bold" mathsize="normal"> <mi>s</mi> </mstyle> </mrow> </semantics></math> </div> <div class='l'> <label >(2)</label> </div> </div> where <math display='inline'><semantics> <mrow> <mi>ε</mi> <mrow> <mo>(</mo> <mi>r</mi> <mo>)</mo> </mrow> </mrow> </semantics></math> is complex permittivity at position <math display='inline'><semantics> <mi>r</mi> </semantics></math> in the examined volume, <math display='inline'><semantics> <mrow> <mstyle mathvariant="bold" mathsize="normal"> <mi>E</mi> </mstyle> <mrow> <mo>(</mo> <mi>r</mi> <mo>)</mo> </mrow> </mrow> </semantics></math> is an electric field, <math display='inline'><semantics> <mrow> <mo>∂</mo> <msub> <mi>Ω</mi> <mi>j</mi> </msub> </mrow> </semantics></math> is a surface surrounding the measuring electrode, <math display='inline'><semantics> <mrow> <mi>d</mi> <mstyle mathvariant="bold" mathsize="normal"> <mi>s</mi> </mstyle> </mrow> </semantics></math> is a normal vector to a small element of a surface, <math display='inline'><semantics> <mrow> <msub> <mi>V</mi> <mi>i</mi> </msub> </mrow> </semantics></math> is a potential at the <math display='inline'><semantics> <mi>i</mi> </semantics></math>-th electrode, and <math display='inline'><semantics> <mrow> <msub> <mi>C</mi> <mrow> <mi>m</mi> <mrow> <mo>(</mo> <mrow> <mi>i</mi> <mo>,</mo> <mi>j</mi> </mrow> <mo>)</mo> </mrow> </mrow> </msub> </mrow> </semantics></math> is a complex capacitance measured for a pair of electrodes <math display='inline'><semantics> <mrow> <mrow> <mo>(</mo> <mrow> <mi>i</mi> <mo>,</mo> <mi>j</mi> </mrow> <mo>)</mo> </mrow> </mrow> </semantics></math>. The ECTsim toolbox allows defining a model describing the permittivity and conductivity of the object and uses a finite volumes method to calculate corresponding mutual capacitances between all defined electrodes. The simulation of the electric field is performed on a non-uniformly refined Cartesian mesh. 
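As a simple illustration of Equation (1), the complex permittivity entering these calculations can be evaluated as follows; this is a minimal sketch, and the numerical values used are placeholders rather than the tissue parameters of Table 1 and Table 2: <pre><code class="language-python">
import numpy as np

f = 100e6                     # excitation frequency used in this work [Hz]
omega = 2 * np.pi * f

def complex_permittivity(eps_prime, sigma, w=omega):
    """Literal implementation of Equation (1): eps = eps' - j * sigma / w."""
    return eps_prime - 1j * sigma / w

# Hypothetical values, for illustration only (see Table 1 and Table 2 for
# the permittivity and conductivity actually assigned to each tissue).
eps_example = complex_permittivity(eps_prime=20.0, sigma=0.5)
print(eps_example)
</code></pre>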
Solving the forward problem with ECTsim requires defining the geometrical and electrical parameters of the model, which should contain the electrodes, insulation, and objects in the field of view.</div><section id='sec2dot1-sensors-23-07774' type=''><h4 class='html-italic' data-nested='2'> 2.1. Model of the Human Thorax</h4><div class='html-p'>A model of a 2D slice of the human thorax, proposed in [<a href="#B60-sensors-23-07774" class="html-bibr">60</a>], was used in the generation of the training dataset. The proposed thorax phantom is based on axial lung CT slice segmentation [<a href="#B63-sensors-23-07774" class="html-bibr">63</a>] and represents a patient lying supine. The model includes the heart, aorta, spine, and lungs (<a href="#sensors-23-07774-f001" class="html-fig">Figure 1</a>a). A sensor with 32 electrodes was used. Permittivity and conductivity values of the model elements are given in <a href="#sensors-23-07774-t001" class="html-table">Table 1</a> and <a href="#sensors-23-07774-t002" class="html-table">Table 2</a>. Lung tissue permittivity and conductivity values (<a href="#sensors-23-07774-t001" class="html-table">Table 1</a>) for the inspiration and expiration phases are defined at an excitation frequency of 100 MHz for healthy lungs and two conditions: pneumothorax (further referred to as illness A) and pleural effusion (illness B) [<a href="#B56-sensors-23-07774" class="html-bibr">56</a>]. The lung and pleura can be affected by either one or both conditions. Pneumothorax occurs when there is a pathological accumulation of air between the collapsed lung tissue and the interior wall of the thorax. When a patient is lying in a supine position, the air in the pleural cavity moves towards the anterior chest wall, which corresponds to the top of the image. Therefore, pneumothorax is modeled by generating an ellipse shifted to the bottom and center of the model. The intersection of this ellipse with the lung represents the partially collapsed lung tissue, and the rest of the lung region represents the air trapped in the pleural cavity. Pleural effusion is defined as the pathological accumulation of excessive fluid in the pleura. In the supine position, free fluid tends to collect in the dorsal parts of the pleura, which corresponds to the bottom of the image. Pleural effusion is modeled in a similar way to pneumothorax, but the ellipse is shifted to the top and center of the image. Hydropneumothorax occurs when the pleura is affected simultaneously by both illnesses (the concurrent presence of air and fluid in the pleural cavity). In this case, the surface of the pleural effusion is almost perfectly horizontal in the supine position, with no characteristic meniscus. Therefore, the healthy part of the lung is represented by the intersection of the lung with an ellipse shifted towards the center of the model. The pleural region is divided by a straight line into an upper part corresponding to the pneumothorax and a lower part corresponding to the pleural effusion. The dielectric properties of the effusion fluid are close to those of blood with an extremely low red blood cell count. According to cow blood-based studies, blood’s electrical parameters (relative permittivity and conductivity) depend on the hematocrit level [<a href="#B64-sensors-23-07774" class="html-bibr">64</a>]. In accordance with human blood-based studies, we can take the conductivity value for a hematocrit of 0 to be equal to 1.4 S/m [<a href="#B65-sensors-23-07774" class="html-bibr">65</a>].
This value is consistent with the value obtained from cow blood, so for the purpose of this work, we can assume a permittivity value of 70. Both conditions imply a reduction of the image region with electrical properties corresponding to healthy lung tissue and a growth of the area with abnormal electrical properties.</div><div class='html-p'>When generating the training set, possible individual changes were taken into account, i.e., the location and size of the lungs and the location and size of the heart, aorta, and spine. A certain range of change in the center position and the angle of rotation was adopted for all organs. All organs, which were modeled using ellipses, could also change their size, with the change along each axis being independent. A uniform probability distribution was used when selecting parameter values from their ranges. Whether the lungs were healthy or affected by diseases A or B was randomized. The size and location of the illness-affected regions were randomized, with pneumothorax appearing only at the top (front of the supine patient) and effusion at the bottom of the image (back of the supine patient).</div><div class='html-p'>In ECTsim, the numerical simulation is performed on a dense, non-uniform mesh, but the reconstruction is conducted on a reduced mesh. In this work, the matrix of the reconstructed image was 64 × 64 pixels. Since only pixels in the field of view of the tomographic probe are reconstructed, the number of reconstructed pixels is smaller, equal to 1856 for the adopted model. This significantly reduces the size of the output data vector of the neural network.</div></section><section id='sec2dot2-sensors-23-07774' type=''><h4 class='html-italic' data-nested='2'> 2.2. Measurement Simulation in Capacitively Coupled Electrical Impedance Tomography</h4><div class='html-p'>By solving the forward problem in CCEIT, i.e., solving the equation for the electric field, it is possible to calculate the capacitance measurements between electrodes based on a known distribution of complex electrical permittivity in the examined volume. The forward problem can be written as:<div class='html-disp-formula-info' id='FD3-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>c</mi> <mo>=</mo> <mi>f</mi> <mrow> <mo>(</mo> <mi>ε</mi> <mo>)</mo> </mrow> </mrow> </semantics></math> </div> <div class='l'> <label >(3)</label> </div> </div> where <math display='inline'><semantics> <mi>f</mi> </semantics></math> is a non-linear vector function, <math display='inline'><semantics> <mrow> <mi>c</mi> <mo> </mo> <mo>∈</mo> <msup> <mi>R</mi> <mi>M</mi> </msup> </mrow> </semantics></math> is a complex capacitance vector, and <math display='inline'><semantics> <mrow> <mi>ε</mi> <mo> </mo> <mo>∈</mo> <mo> </mo> <msup> <mi>R</mi> <mi>N</mi> </msup> </mrow> </semantics></math> is a complex electrical permittivity vector.
It is possible to use the linear approximation of the non-linear function <math display='inline'><semantics> <mi>f</mi> </semantics></math> as follows:<div class='html-disp-formula-info' id='FD4-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <msub> <mi>c</mi> <mrow> <mi>M</mi> <mo>×</mo> <mn>1</mn> </mrow> </msub> <mo>=</mo> <msub> <mi>S</mi> <mrow> <mi>M</mi> <mo>×</mo> <mi>N</mi> </mrow> </msub> <msub> <mi>ε</mi> <mrow> <mi>N</mi> <mo>×</mo> <mn>1</mn> </mrow> </msub> </mrow> </semantics></math> </div> <div class='l'> <label >(4)</label> </div> </div> where <math display='inline'><semantics> <mi>S</mi> </semantics></math> is the so-called sensitivity matrix, which is a Jacobian of the capacitance with respect to pixel values, representing electrical permittivity [<a href="#B66-sensors-23-07774" class="html-bibr">66</a>]. By having <math display='inline'><semantics> <mi>K</mi> </semantics></math> electrodes and measuring the capacitance between each pair of electrodes, it is possible to acquire:<div class='html-disp-formula-info' id='FD5-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>M</mi> <mo>=</mo> <mi>K</mi> <mrow> <mo>(</mo> <mrow> <mi>K</mi> <mo>−</mo> <mn>1</mn> </mrow> <mo>)</mo> </mrow> <mo>/</mo> <mn>2</mn> <mo> </mo> </mrow> </semantics></math> </div> <div class='l'> <label >(5)</label> </div> </div> measurements [<a href="#B67-sensors-23-07774" class="html-bibr">67</a>]. If the measurement for a pair of electrodes is made twice by exchanging the roles of the electrodes (the exciting electrode changes with the measuring electrode), there are twice as many measurements. For <math display='inline'><semantics> <mrow> <mi>K</mi> <mo>=</mo> <mn>32</mn> </mrow> </semantics></math> electrodes, there are <math display='inline'><semantics> <mrow> <mi>M</mi> <mo>=</mo> <mn>992</mn> </mrow> </semantics></math> capacitance measurements with repetitions. The size of the measurement vector corresponds to the size of the input of the neural network. The number of capacitance measurements and the number of pixels to reconstruct determine the size of the model. In the experiments conducted, the size of the input vector was <math display='inline'><semantics> <mi>M</mi> </semantics></math> = 992, and the size of the output vector was <math display='inline'><semantics> <mi>N</mi> </semantics></math> = 1856.</div></section><section id='sec2dot3-sensors-23-07774' type=''><h4 class='html-italic' data-nested='2'> 2.3. Training Dataset</h4><div class='html-p'>The decision on the number of samples in the training dataset is one of the most difficult, as there is no accurate method to estimate the necessary dataset size. However, our experiments show that a model with an input size of 992 and an output size of 1856 trained on a dataset containing around 200,000 samples provides good results. The dataset should include randomly generated samples representing healthy lungs and lungs partially affected by the illnesses. Because lungs can be affected by one or both conditions, it is possible to have 16 different cases depending on which lung is affected by which condition. We created a dataset that evenly represented all 16 cases. Additionally, to mitigate overfitting, we introduced samples with random ellipses.</div><div class='html-p'>For ANN training, it is necessary to divide the whole dataset into training, validation, and testing subsets. The training subset is used as the source of experience during model training. 
The validation subset is used to monitor the main metric during training, and the testing subset is used for the final model performance evaluation. Two datasets were generated with the model shown in <a href="#sensors-23-07774-f001" class="html-fig">Figure 1</a>. Subsequently, samples with random ellipses were introduced into the datasets, resulting in:</div><div class='html-p'><ul class='html-bullet'><li><div class='html-p'>The training set contained 9375 samples from each class and 44,000 samples with random ellipses, for a total of 194,000 samples. During training, this set was randomly split on the fly into learning and validation subsets with a ratio of 75:25.</div></li><li><div class='html-p'>The test set contained 2500 samples from each class and 2500 samples with random ellipses, for a total of 42,500 samples.</div></li></ul></div><div class='html-p'>Each sample of the dataset generated by ECTsim contains a complex capacitance measurement vector of size 992 and a vector of complex electrical permittivity values for the 1856 pixels in the field of view. Due to the limitations of the PyTorch framework, which is capable only of ANN training on real numbers, only the imaginary components of the capacitance measurements and electrical permittivity were taken for ANN training purposes. Capacitance measurements depend on the relative positions of the electrodes and the electrical properties of the material inside the sensor. To mitigate the influence of the mutual electrode arrangement, one can normalize the measurements by calculating the capacitance for the empty sensor and for the sensor filled with a high-conductivity material. Capacitance measurements can be stored as a 2D matrix where the column and row indices correspond, respectively, to the excitation and sensing electrodes. Example color maps of conductivity distributions and the resulting capacitance measurements (imaginary component) are presented in <a href="#sensors-23-07774-f002" class="html-fig">Figure 2</a>.</div></section><section id='sec2dot4-sensors-23-07774' type=''><h4 class='html-italic' data-nested='2'> 2.4. ANN Architectures Used</h4><div class='html-p'>Two different neural network architectures were used in the experiments: a fully connected neural network (FCNN) similar to EIT-4LDNN [<a href="#B39-sensors-23-07774" class="html-bibr">39</a>] and a convolutional network based on a conditional generative adversarial network (cGAN) architecture. We will show that with the use of modern ANN training techniques, such as the AdAM optimization algorithm [<a href="#B68-sensors-23-07774" class="html-bibr">68</a>] and batch normalization layers [<a href="#B69-sensors-23-07774" class="html-bibr">69</a>], it is possible to obtain satisfactory results even with a simple, fully connected ANN with one hidden layer.</div><div class='html-p'>As a fully connected NN (FCNN), we used the network architecture shown in <a href="#sensors-23-07774-f003" class="html-fig">Figure 3</a>. It takes a vector of size 992 as input, applies batch normalization to the input, then one linear layer (implemented as a general matrix multiply (GeMM) algorithm) with batch normalization and rectified linear unit (ReLU) activation, and finally one more linear layer.</div><div class='html-p'>A different model used in this work was a convolutional network based on the cGAN architecture (<a href="#sensors-23-07774-f004" class="html-fig">Figure 4</a>).
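Before detailing the cGAN, the FCNN described above can be written as a short PyTorch sketch; this is illustrative only, and the hidden-layer width is an assumption, since the exact layer sizes are given only in Figure 3: <pre><code class="language-python">
import torch
import torch.nn as nn

class FCNN(nn.Module):
    """Sketch of the fully connected reconstruction network of Figure 3:
    batch-normalized input, one hidden linear layer with batch normalization
    and ReLU, and a linear output layer. The hidden width (4096 here) is an
    assumption."""
    def __init__(self, n_meas=992, n_pixels=1856, hidden=4096):
        super().__init__()
        self.net = nn.Sequential(
            nn.BatchNorm1d(n_meas),
            nn.Linear(n_meas, hidden),
            nn.BatchNorm1d(hidden),
            nn.ReLU(),
            nn.Linear(hidden, n_pixels),
        )

    def forward(self, c):          # c: (batch, 992) normalized capacitances
        return self.net(c)         # (batch, 1856) conductivity values

model = FCNN()
dummy = torch.randn(8, 992)        # a random batch, for shape checking only
print(model(dummy).shape)          # torch.Size([8, 1856])
</code></pre>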
It consisted of a U-Net-based generator (<a href="#sensors-23-07774-f005" class="html-fig">Figure 5</a>) and a simple discriminator consisting of several convolutional layers (<a href="#sensors-23-07774-f006" class="html-fig">Figure 6</a>). The convolutional and deconvolutional blocks were used as the building blocks for the generator and discriminator, as shown in <a href="#sensors-23-07774-f007" class="html-fig">Figure 7</a>. Usually, the generator in the cGAN approach takes a random latent vector and condition vector as input and generates an image as an output. The latent vector is intended to provide sufficient intraclass variability, which helps to obtain a more reliable output. Such an approach works well in the case of a relatively small condition vector, for example, as in the case of 12 electrodes corresponding to the size 66 of the condition vector [<a href="#B70-sensors-23-07774" class="html-bibr">70</a>]. When the condition size increases, the network starts to lose stability, and some modifications are required. Our experiments found that using the Pix2Pix approach [<a href="#B71-sensors-23-07774" class="html-bibr">71</a>] allowed us to create a stable cGAN network in the case of 992 measurements. Pix2Pix approach implies two main modifications to the traditional cGAN model: usage of dropouts inside the generator instead of latent vectors and modification of the generator loss function, which is based on adding L2 and L1 loss components referencing true images for the given condition as follows:<div class='html-disp-formula-info' id='FD6-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>f</mi> <mrow> <mo>(</mo> <mrow> <msub> <mi>l</mi> <mi>r</mi> </msub> <mo>,</mo> <msub> <mi>l</mi> <mi>p</mi> </msub> <mo>,</mo> <msub> <mi>y</mi> <mi>r</mi> </msub> <mo>,</mo> <msub> <mi>y</mi> <mi>p</mi> </msub> </mrow> <mo>)</mo> </mrow> <mo>=</mo> <mi>BCR</mi> <mrow> <mo>(</mo> <mrow> <msub> <mi>l</mi> <mi>p</mi> </msub> <mo>,</mo> <msub> <mi>l</mi> <mi>r</mi> </msub> </mrow> <mo>)</mo> </mrow> <mo>+</mo> <mn>100</mn> <mi>MSE</mi> <mrow> <mo>(</mo> <mrow> <msub> <mi>y</mi> <mi>r</mi> </msub> <mo>,</mo> <msub> <mi>y</mi> <mi>p</mi> </msub> </mrow> <mo>)</mo> </mrow> <mo>+</mo> <mn>100</mn> <mrow> <mo>|</mo> <mrow> <msub> <mi>y</mi> <mi>r</mi> </msub> <mo>−</mo> <msub> <mi>y</mi> <mi>p</mi> </msub> </mrow> <mo>|</mo> </mrow> <mo> </mo> </mrow> </semantics></math> </div> <div class='l'> <label >(6)</label> </div> </div> where <math display='inline'><semantics> <mrow> <msub> <mi>l</mi> <mi>r</mi> </msub> </mrow> </semantics></math> is the reference value of the probability that the image is true, and <math display='inline'><semantics> <mrow> <msub> <mi>l</mi> <mi>p</mi> </msub> </mrow> </semantics></math> is the probability value of the image being true, determined by the discriminator. <math display='inline'><semantics> <mrow> <msub> <mi>y</mi> <mi>r</mi> </msub> </mrow> </semantics></math> and <math display='inline'><semantics> <mrow> <msub> <mi>y</mi> <mi>p</mi> </msub> </mrow> </semantics></math> are reference and predicted images, respectively; BCR is a binary cross-entropy and MSE is a mean square error.</div><div class='html-p'>We divided the whole dataset into training and testing subsets to train the reconstruction model as described above. 
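Returning to Equation (6), the generator objective can be written as a short PyTorch sketch; this is illustrative only, and the reduction over pixels for the L1 term is assumed here to be a mean: <pre><code class="language-python">
import torch
import torch.nn.functional as F

def generator_loss(l_p, l_r, y_p, y_r):
    """Sketch of the generator loss of Equation (6): binary cross-entropy on
    the discriminator output plus L2 (MSE) and L1 terms, both weighted by 100.
    Averaging the L1 term over pixels is an assumption."""
    adv = F.binary_cross_entropy(l_p, l_r)          # BCR(l_p, l_r)
    l2 = 100.0 * F.mse_loss(y_p, y_r)               # 100 * MSE(y_r, y_p)
    l1 = 100.0 * torch.mean(torch.abs(y_r - y_p))   # 100 * |y_r - y_p|
    return adv + l2 + l1
</code></pre>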
A binary cross-entropy was used as a loss function while training the discriminator, and the AdAM was used as an optimizer.</div></section><section id='sec2dot5-sensors-23-07774' type=''><h4 class='html-italic' data-nested='2'> 2.5. Reconstruction Quality Assessment</h4><div class='html-p'>To correctly assess ANN training quality, it is necessary to divide the dataset into three parts: training, validation, and test subsets. The training subset is used for the actual ANN training. The validation subset allows adopted metrics to be calculated on the fly during training, and the test dataset is necessary for the final evaluation of the training result.</div><div class='html-p'>A widespread quality evaluation strategy in EIT image reconstruction mainly focuses on adopting simple pixel-to-pixel metrics such as average relative error, L2 norm (RMSE), or 2D correlation coefficient [<a href="#B39-sensors-23-07774" class="html-bibr">39</a>,<a href="#B70-sensors-23-07774" class="html-bibr">70</a>]. Such metrics allow estimating conditional consistency—the likelihood of reconstructing the same image for the same given input [<a href="#B72-sensors-23-07774" class="html-bibr">72</a>], but they are limited because averaging the metric on a test dataset does not take into account outlying cases. Therefore, in this paper, we propose calculating metric dispersion in addition to the metric mean value on the test dataset.</div><div class='html-p'>In this work, we use the following pixel-based metrics to evaluate reconstruction quality: root mean square error (RMSE), 2D correlation coefficient (CC), peak signal-to-noise ratio (PSNR), and structural similarity index measure (SSIM). These metrics are defined as follows:<div class='html-disp-formula-info' id='FD7-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>RMSE</mi> <mrow> <mo>(</mo> <mrow> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mo>,</mo> <mi>y</mi> </mrow> <mo>)</mo> </mrow> <mo>=</mo> <msqrt> <mrow> <mfrac> <mn>1</mn> <mi>N</mi> </mfrac> <munderover> <mstyle mathsize="100%" displaystyle="true"> <mo>∑</mo> </mstyle> <mrow> <mi>i</mi> <mo>=</mo> <mn>1</mn> </mrow> <mi>N</mi> </munderover> <msup> <mrow> <mrow> <mo>|</mo> <mrow> <msub> <mi>y</mi> <mi>i</mi> </msub> <mo>−</mo> <msub> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mi>i</mi> </msub> </mrow> <mo>|</mo> </mrow> </mrow> <mn>2</mn> </msup> </mrow> </msqrt> <mo> </mo> </mrow> </semantics></math> </div> <div class='l'> <label >(7)</label> </div> </div><div class='html-disp-formula-info' id='FD8-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>CC</mi> <mrow> <mo>(</mo> <mrow> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mo>,</mo> <mi>y</mi> </mrow> <mo>)</mo> </mrow> <mo>=</mo> <mfrac> <mrow> <msubsup> <mstyle mathsize="100%" displaystyle="true"> <mo>∑</mo> </mstyle> <mrow> <mi>i</mi> <mo>=</mo> <mn>1</mn> </mrow> <mi>N</mi> </msubsup> <mrow> <mo>(</mo> <mrow> <msub> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mi>i</mi> </msub> <mo>−</mo> <mover accent="true"> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mo stretchy="true">¯</mo> </mover> </mrow> <mo>)</mo> </mrow> <mrow> <mo>(</mo> <mrow> <msub> <mi>y</mi> <mi>i</mi> </msub> <mo>−</mo> <mover accent="true"> <mi>y</mi> <mo>¯</mo> </mover> </mrow> <mo>)</mo> </mrow> </mrow> <mrow> <msqrt> <mrow> <msubsup> <mstyle mathsize="100%" displaystyle="true"> <mo>∑</mo> </mstyle> <mrow> <mi>i</mi> <mo>=</mo> <mn>1</mn> </mrow> <mi>N</mi> </msubsup> 
<msup> <mrow> <mrow> <mo>(</mo> <mrow> <msub> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mi>i</mi> </msub> <mo>−</mo> <mover accent="true"> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mo stretchy="true">¯</mo> </mover> </mrow> <mo>)</mo> </mrow> </mrow> <mn>2</mn> </msup> <msubsup> <mstyle mathsize="100%" displaystyle="true"> <mo>∑</mo> </mstyle> <mrow> <mi>i</mi> <mo>=</mo> <mn>1</mn> </mrow> <mi>N</mi> </msubsup> <msup> <mrow> <mrow> <mo>(</mo> <mrow> <msub> <mi>y</mi> <mi>i</mi> </msub> <mo>−</mo> <mover accent="true"> <mi>y</mi> <mo>¯</mo> </mover> </mrow> <mo>)</mo> </mrow> </mrow> <mn>2</mn> </msup> </mrow> </msqrt> </mrow> </mfrac> </mrow> </semantics></math> </div> <div class='l'> <label >(8)</label> </div> </div><div class='html-disp-formula-info' id='FD9-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>PSNR</mi> <mrow> <mo>(</mo> <mrow> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mo>,</mo> <mi>y</mi> </mrow> <mo>)</mo> </mrow> <mo>=</mo> <mn>10</mn> <msub> <mrow> <mi>log</mi> </mrow> <mrow> <mn>10</mn> </mrow> </msub> <mi>N</mi> <mfrac> <mrow> <mi>max</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </mrow> <mrow> <msubsup> <mstyle mathsize="100%" displaystyle="true"> <mo>∑</mo> </mstyle> <mrow> <mi>i</mi> <mo>=</mo> <mn>1</mn> </mrow> <mi>N</mi> </msubsup> <msup> <mrow> <mrow> <mo>|</mo> <mrow> <msub> <mi>y</mi> <mi>i</mi> </msub> <mo>−</mo> <msub> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mi>i</mi> </msub> </mrow> <mo>|</mo> </mrow> </mrow> <mn>2</mn> </msup> </mrow> </mfrac> </mrow> </semantics></math> </div> <div class='l'> <label >(9)</label> </div> </div><div class='html-disp-formula-info' id='FD10-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>SSIM</mi> <mrow> <mo>(</mo> <mrow> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mo>,</mo> <mi>y</mi> </mrow> <mo>)</mo> </mrow> <mo>=</mo> <mfrac> <mrow> <mrow> <mo>(</mo> <mrow> <mn>2</mn> <msub> <mi>μ</mi> <mi>y</mi> </msub> <msub> <mi>μ</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </msub> <mo>+</mo> <msub> <mi>c</mi> <mn>1</mn> </msub> </mrow> <mo>)</mo> </mrow> <mrow> <mo>(</mo> <mrow> <mn>2</mn> <msub> <mi>σ</mi> <mrow> <mi>y</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </mrow> </msub> <mo>+</mo> <msub> <mi>c</mi> <mn>2</mn> </msub> </mrow> <mo>)</mo> </mrow> </mrow> <mrow> <mrow> <mo>(</mo> <mrow> <msub> <mi>μ</mi> <mi>y</mi> </msub> <mo>+</mo> <msub> <mi>μ</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </msub> <mo>+</mo> <msub> <mi>c</mi> <mn>1</mn> </msub> </mrow> <mo>)</mo> </mrow> <mrow> <mo>(</mo> <mrow> <msubsup> <mi>σ</mi> <mi>y</mi> <mn>2</mn> </msubsup> <mo>+</mo> <msubsup> <mi>σ</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mn>2</mn> </msubsup> <mo>+</mo> <msub> <mi>c</mi> <mn>2</mn> </msub> </mrow> <mo>)</mo> </mrow> </mrow> </mfrac> <mo> </mo> <mo> </mo> </mrow> </semantics></math> </div> <div class='l'> <label >(10)</label> </div> </div> where</div><div class='html-p'><math display='inline'><semantics> <mi>N</mi> </semantics></math>—number of pixels,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msub> <mi>y</mi> <mi>i</mi> </msub> </mrow> </semantics></math>—expected conductivity value at pixel <math display='inline'><semantics> <mi>i</mi> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msub> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mi>i</mi> </msub> </mrow> 
</semantics></math>—reconstructed conductivity value at pixel <math display='inline'><semantics> <mi>i</mi> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msub> <mi>μ</mi> <mi>y</mi> </msub> </mrow> </semantics></math>—mean value of vector <math display='inline'><semantics> <mi>y</mi> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msub> <mi>μ</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </msub> </mrow> </semantics></math>—mean value of vector <math display='inline'><semantics> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msubsup> <mi>σ</mi> <mi>y</mi> <mn>2</mn> </msubsup> </mrow> </semantics></math>—variance of vector <math display='inline'><semantics> <mi>y</mi> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msubsup> <mi>σ</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> <mn>2</mn> </msubsup> </mrow> </semantics></math>—variance of vector <math display='inline'><semantics> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msub> <mi>σ</mi> <mrow> <mi>y</mi> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </mrow> </msub> </mrow> </semantics></math>—covariance of vectors <math display='inline'><semantics> <mi>y</mi> </semantics></math> and <math display='inline'><semantics> <mover accent="true"> <mi>y</mi> <mo>^</mo> </mover> </semantics></math>,</div><div class='html-p'><math display='inline'><semantics> <mrow> <msub> <mi>c</mi> <mn>1</mn> </msub> </mrow> </semantics></math> and <math display='inline'><semantics> <mrow> <msub> <mi>c</mi> <mn>2</mn> </msub> </mrow> </semantics></math>—stabilization variables to prevent division by zero on weak denominators.</div></section><section id='sec2dot6-sensors-23-07774' type=''><h4 class='html-italic' data-nested='2'> 2.6. Image Diagnostic Value Evaluation Using an ANN Classifier</h4><div class='html-p'>Considering the medical applications of EIT, it is important to evaluate the diagnostic value of the model. For this purpose, in this work, we propose the following procedure: labeled samples corresponding to different medical conditions are entered into the training dataset; a classifying neural network is trained on the generated image data (true images); the classifier is then verified on reconstructed images; and a comparison of the classification results allows us to assess the diagnostic value of the reconstructed images.</div><div class='html-p'>A well-known approach to the comparison of classification models is the use of receiver operating characteristic (ROC) curves. The ROC curve demonstrates the diagnostic ability of a binary classifier depending on changes in the discrimination threshold. The value of the AUC (area under the ROC curve) metric shows the model’s performance and ranges from 0 to 1, where 1 means perfect performance, 0.9 means very good performance, and 0.5 means no discriminative ability.</div><div class='html-p'>For multi-class classification, as in our case, the analysis of the AUC metric needs to be adjusted. In our model, we have 16 classes of lung status, starting with the class in which both lungs are healthy and ending with the class in which both lungs are affected by both diseases. One additional class (17th) corresponds to random ellipses in the thorax phantom.
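One possible way to enumerate these 17 classes and to compute a macro-averaged one-versus-rest AUC is sketched below; this is a non-authoritative sketch, the use of scikit-learn and the label encoding are assumptions, and the arrays are placeholders: <pre><code class="language-python">
from itertools import product
import numpy as np
from sklearn.metrics import roc_auc_score   # assuming scikit-learn is available

# 16 lung-status classes (each lung: healthy, A = pneumothorax,
# B = pleural effusion, or A+B) plus one class for random ellipses.
# The actual label encoding used in this work is not specified; this
# enumeration is one possible convention.
lung_states = ["healthy", "A", "B", "A+B"]
classes = [f"L:{l}/R:{r}" for l, r in product(lung_states, lung_states)]
classes.append("random ellipses")
assert len(classes) == 17

# One-versus-rest AUC, macro-averaged over the 17 classes, given the
# classifier's softmax outputs (y_prob) and integer ground-truth labels.
y_true = np.arange(200) % 17                          # placeholder labels
y_prob = np.random.dirichlet(np.ones(17), size=200)   # placeholder softmax outputs
auc = roc_auc_score(y_true, y_prob, multi_class="ovr", average="macro")
print(f"macro OvR AUC: {auc:.3f}")
</code></pre>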
The OvR (one versus the rest) method was used, which consists of calculating the ROC curve for each of the 17 classes, where, in each step, a given class is considered the positive class and all remaining classes are treated as the negative class. The average ROC curve is used to evaluate the multi-class classifier [<a href="#B73-sensors-23-07774" class="html-bibr">73</a>].</div><div class='html-p'>To evaluate reconstruction quality, we used the classifier ANN with the architecture shown in <a href="#sensors-23-07774-f008" class="html-fig">Figure 8</a>. A conductivity vector of size 1856, mapped onto a 64 × 64 image matrix, is used as input to the ANN classifier. The classifier produces the probabilities of belonging to each of the 17 classes. Because of the two-dimensional input, we selected the following network architecture: 2D convolutions with batch normalization, followed by a fully connected layer with softmax activation that produces a vector of probabilities. Softmax activation has the property that the sum of all vector elements is one; therefore, it is possible to interpret these values as probabilities.</div><div class='html-p'>The model was trained on generated images from the training and validation subsets and the corresponding labels saved during dataset generation. Those samples were divided into training and validation parts in a ratio of 75:25. Binary cross-entropy was used as the loss function, and AdAM was used as the optimizer.</div></section></section><section id='sec3-sensors-23-07774' type='results'><h2 data-nested='1'> 3. Results</h2><div class='html-p'>After training the FCNN and cGAN networks, reconstruction quality evaluations were carried out. We reconstructed images from the testing dataset containing 42,500 samples using the trained ANNs and selected algebraic methods (<a href="#sensors-23-07774-f009" class="html-fig">Figure 9</a>).
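For reference, before turning to the algebraic baselines below, a classifier of the kind described in Section 2.6 can be sketched in PyTorch as follows; the number of convolutional blocks, channel widths, and kernel sizes are assumptions, as the actual architecture is given only in Figure 8: <pre><code class="language-python">
import torch
import torch.nn as nn

class ThoraxClassifier(nn.Module):
    """Sketch of a Section 2.6-style classifier: 2D convolutions with batch
    normalization followed by a fully connected layer with softmax over 17
    classes. Layer counts and widths are assumptions."""
    def __init__(self, n_classes=17):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=3, stride=2, padding=1),  # 64 -> 32
            nn.BatchNorm2d(16),
            nn.ReLU(),
            nn.Conv2d(16, 32, kernel_size=3, stride=2, padding=1), # 32 -> 16
            nn.BatchNorm2d(32),
            nn.ReLU(),
        )
        self.head = nn.Sequential(
            nn.Flatten(),
            nn.Linear(32 * 16 * 16, n_classes),
            nn.Softmax(dim=1),
        )

    def forward(self, x):              # x: (batch, 1, 64, 64) conductivity maps
        return self.head(self.features(x))

probs = ThoraxClassifier()(torch.randn(4, 1, 64, 64))
print(probs.shape, probs.sum(dim=1))   # (4, 17), each row sums to 1
</code></pre>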
Because the response of ANN is very fast, we compared ANN reconstruction with fast one-step algebraic methods:</div><ul class='html-bullet'><li><div class='html-p'>The linear back projection (LBP) given by <div class='html-disp-formula-info' id='FD11-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>ε</mi> <mo>=</mo> <msub> <mi>ε</mi> <mrow> <mi>m</mi> <mi>i</mi> <mi>n</mi> </mrow> </msub> <mo>+</mo> <msup> <mover accent="true"> <mi>S</mi> <mo>˜</mo> </mover> <mi>T</mi> </msup> <msub> <mi>c</mi> <mi>n</mi> </msub> <mo>,</mo> </mrow> </semantics></math> </div> <div class='l'> <label >(11)</label> </div> </div> where <math display='inline'><semantics> <mrow> <msup> <mover accent="true"> <mi>S</mi> <mo>˜</mo> </mover> <mi>T</mi> </msup> </mrow> </semantics></math> is a transpose of the normalized sensitivity matrix and <math display='inline'><semantics> <mrow> <msub> <mi>c</mi> <mi>n</mi> </msub> </mrow> </semantics></math> is a vector of normalized measurements.</div></li><li><div class='html-p'>The pseudoinverse with Tikhonov regularization (TPINV) given by <div class='html-disp-formula-info' id='FD13-sensors-23-07774'> <div class='f'> <math display='block'><semantics> <mrow> <mi>ε</mi> <mo>=</mo> <msup> <mrow> <mrow> <mo>(</mo> <mrow> <msup> <mi>S</mi> <mi>T</mi> </msup> <mi>S</mi> <mo>+</mo> <mi>α</mi> <mi>I</mi> </mrow> <mo>)</mo> </mrow> </mrow> <mrow> <mo>−</mo> <mn>1</mn> </mrow> </msup> <msup> <mi>S</mi> <mi>T</mi> </msup> <msub> <mi>c</mi> <mi>n</mi> </msub> <mo>.</mo> </mrow> </semantics></math> </div> <div class='l'> <label >(12)</label> </div> </div></div></li></ul><div class='html-p'>The regularization parameter <math display='inline'><semantics> <mi>α</mi> </semantics></math> was chosen heuristically as 10<sup>−9</sup> to achieve the best possible results.</div><div class='html-p'>The abovementioned image quality measures, such as RMSE, PSNR, SSIM, and 2D correlation coefficient, were computed for each pair of ground truth and reconstructed images. The mean value, standard deviation, and median over the testing dataset were calculated (<a href="#sensors-23-07774-t003" class="html-table">Table 3</a>). Image quality norms for the testing data with a high level of noise (30 dB) calculated for an image reconstructed using ANNs are given in <a href="#sensors-23-07774-t004" class="html-table">Table 4</a>. It is easy to see that reconstruction with the use of ANNs gives significantly better results than algebraic methods. The image quality norms for reconstruction using very noisy measurement data (30 dB and 10 dB) obtained by neural networks are better than the norms obtained for low-noise data by classical methods (LBP, TPINV). And among ANNs, the cGAN-based network produces more accurate images. It is easy to see that a cGAN-based network allows us to obtain very good results even in the case of very high noise (10 dB). Introducing noise during training makes results from both FCNN and cGAN-based networks even better and allows us to obtain images comparable to those obtained from measurements without noise. However, the simple network (<a href="#sensors-23-07774-f010" class="html-fig">Figure 10</a>) cannot achieve the level of quality produced by the complex convolutional network (<a href="#sensors-23-07774-f011" class="html-fig">Figure 11</a>). 
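For reference, the two algebraic baselines of Equations (11) and (12) and the per-image quality norms aggregated in Table 3 and Table 4 can be sketched as follows; this is a minimal sketch with placeholder arrays, and the sensitivity-matrix normalization used for LBP is an assumption: <pre><code class="language-python">
import numpy as np

# One-step algebraic baselines of Equations (11) and (12); S is the M x N
# sensitivity matrix, S_norm its normalized version, c_n the normalized
# measurement vector, and eps_min the background permittivity level.
# All arrays here are placeholders with the dimensions used in this work.
M, N = 992, 1856
rng = np.random.default_rng(0)
S = rng.standard_normal((M, N))
S_norm = S / np.linalg.norm(S, axis=0, keepdims=True)   # an assumed normalization
c_n = rng.standard_normal(M)
eps_min = 0.0

eps_lbp = eps_min + S_norm.T @ c_n                      # Equation (11), LBP
alpha = 1e-9                                            # regularization parameter
eps_tpinv = np.linalg.solve(S.T @ S + alpha * np.eye(N), S.T @ c_n)  # Equation (12)

# Pixel-based quality norms of Section 2.5 for a single image pair; over the
# test set these are aggregated as mean, standard deviation, and median.
def rmse(y_hat, y):
    return np.sqrt(np.mean(np.abs(y - y_hat) ** 2))

def corr2d(y_hat, y):
    return np.corrcoef(y_hat.ravel(), y.ravel())[0, 1]

y_true = rng.standard_normal(N)                         # placeholder ground truth
for name, rec in [("LBP", eps_lbp), ("TPINV", eps_tpinv)]:
    print(name, rmse(rec, y_true), corr2d(rec, y_true))
</code></pre>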
The histograms of the distribution of image distance measures are shown in <a href="#sensors-23-07774-f012" class="html-fig">Figure 12</a>.</div><div class='html-p'>The diagnostic value of the reconstructed images was evaluated using the classifier network trained on ground truth images. The OvR (one versus the rest) method was used to assess multi-class classification. We calculated mean ROC curves for each reconstruction method without introducing noise into the measurements and then calculated the corresponding AUC metric values (<a href="#sensors-23-07774-f013" class="html-fig">Figure 13</a>a). Cases involving the introduction of noise into the training and testing datasets were also evaluated (<a href="#sensors-23-07774-f013" class="html-fig">Figure 13</a>b). The resulting values were added to <a href="#sensors-23-07774-t003" class="html-table">Table 3</a> and <a href="#sensors-23-07774-t004" class="html-table">Table 4</a>, respectively. The obtained values show that ANN-based image reconstruction methods allow the classifier to recognize lung illness conditions with comparably high accuracy. However, it is interesting that even one-step algebraic methods yield results confirming that the reconstructed images contain information about the presence of the disease in lung regions. Differences in the conductivity values in the reconstructed images resulting from the presence of the disease are sufficient to identify the disease entity.</div></section><section id='sec4-sensors-23-07774' type='discussion'><h2 data-nested='1'> 4. Discussion</h2><div class='html-p'>In our work, we have shown the reconstruction of human thorax numerical phantom images in 32-electrode CCEIT. Two artificial neural networks were used to solve the inverse problem. As expected, the more complex NN based on the cGAN architecture achieved better results than the simple, fully connected NN (FCNN). The FCNN produces acceptable results but has reduced generalization ability. This is evidenced by the second peak, for a lower SSIM value, visible in the SSIM measure histogram for the test dataset (<a href="#sensors-23-07774-f012" class="html-fig">Figure 12</a>c). This peak corresponds to the images of random ellipses. The cGAN-based network does not show any irregularities in the metric histograms.</div><div class='html-p'>Diagnostic ability evaluation showed very good classification performance on ANN-reconstructed images. However, unexpectedly, the reconstruction with the use of simple one-step algebraic methods also showed acceptable performance. This leads to the hypothesis that it is potentially possible to detect illnesses based on simple reconstruction. Also, it may be possible to create a classifier network able to detect illness conditions directly from measurements.</div><div class='html-p'>Algebraic methods require both the real and imaginary parts of the measurements represented by complex numbers. However, ANN training on complex numbers is still in its early stages. Therefore, we trained the networks only on the imaginary part of the measurements and electrical permittivity. It might be interesting to train a network using complex input and output and compare the robustness of such a complex-weighted ANN.</div></section><section id='sec5-sensors-23-07774' type='conclusions'><h2 data-nested='1'> 5. Conclusions</h2><div class='html-p'>The use of CCEIT in wearable solutions requires a fast and computationally efficient reconstruction method.
</section><section id='sec4-sensors-23-07774' type='discussion'><h2 data-nested='1'> 4. Discussion</h2><div class='html-p'>In our work, we have shown the reconstruction of human thorax numerical phantom images in 32-electrode CCEIT. Two artificial neural networks were used to solve the inverse problem. As expected, the more complex network based on the cGAN architecture achieved better results than the simple fully connected network (FCNN). The FCNN produces acceptable results but has reduced generalization ability. This is evidenced by the second peak, at a lower SSIM value, visible in the SSIM histogram for the test dataset (<a href="#sensors-23-07774-f012" class="html-fig">Figure 12</a>c). This peak corresponds to the images of random ellipses. The cGAN-based network does not show any irregularities in the metric histograms.</div><div class='html-p'>The evaluation of diagnostic ability showed very good classification performance on ANN-reconstructed images. Unexpectedly, however, reconstruction with the simple one-step algebraic methods also showed acceptable performance. This leads to the hypothesis that it is potentially possible to detect illnesses based on a simple reconstruction. It may also be possible to create a classifier network able to detect illness conditions directly from the measurements.</div><div class='html-p'>Algebraic methods require both the real and imaginary parts of the measurements, which are represented by complex numbers. However, ANN training on complex numbers is still in its early stages. Therefore, we trained the networks only on the imaginary part of the measurements and the electrical permittivity. It would be interesting to train a network using complex-valued input and output and to compare the robustness of such a complex-weighted ANN.</div></section><section id='sec5-sensors-23-07774' type='conclusions'><h2 data-nested='1'> 5. Conclusions</h2><div class='html-p'>The use of CCEIT in wearable solutions requires a fast and computationally efficient reconstruction method. Our work shows that such a solution may be obtained with the use of an artificial neural network. We have developed a numerical phantom modeling a transversal slice of the human thorax, allowing the simulation of different illness conditions in the regions of the reconstructed conductivity distribution. Such conditions correspond to a reduction of the lung area in the image and to the appearance of regions with extremely high or low conductivity. We have shown that numerical simulation of the reconstruction process with an ANN-based approach gives very promising results: while the reconstruction speed is comparable to that of a one-step algebraic approach, the reconstruction quality, assessed with image distance measures, is much higher for the ANN-based approach.</div><div class='html-p'>In addition to the evaluation of results based on image quality metrics, an approach based on assessing the classifier’s ability to recognize a disease state from reconstructed images was proposed. This assessment showed that even the images reconstructed by the simplest reconstruction methods still contained information about the location of the zones with changed conductivity.</div><div class='html-p'>The analysis of the neural network models leads to the conclusion that the more complex network based on the cGAN architecture gives better reconstruction results, although the simplest possible network still gives relatively good results.</div><div class='html-p'>Due to the limitations of the tools used, training of the deep networks was carried out using only the imaginary part of the capacitance and the electrical permittivity. However, the real processes occurring during electrical capacitance tomography are better described by complex numbers.</div><div class='html-p'>Because of the simulation nature of this work, the obtained results cannot be directly used for medical imaging. It is, therefore, necessary to conduct further research, both in simulation and, if possible, on real data. Based on the obtained results, three directions for further research can be identified: refinement and extension of the convolutional network model, ANN training using complex numbers, and examination of the classification possibilities of data reconstructed by classical reconstruction algorithms.</div></section> </div> <div class="html-back"> <section class='html-notes'><h2 >Author Contributions</h2><div class='html-p'>Conceptualization, W.T.S., M.M. and X.Y.; methodology, M.I., W.T.S., D.W. and M.M.; software, M.I., D.W., P.W. and X.H.; validation, M.I., D.W., M.M. and X.H.; formal analysis, X.Y.; investigation, M.I., D.W., P.W. and X.H.; resources, P.W.; data curation, P.W.; writing—original draft preparation, M.I. and W.T.S.; writing—review and editing, W.T.S., M.M. and X.Y.; visualization, M.I., X.H. and D.W.; supervision, W.T.S. and X.Y.; project administration, W.T.S. 
All authors have read and agreed to the published version of the manuscript.</div></section><section class='html-notes'><h2>Funding</h2><div class='html-p'>This research was funded by a YOUNG PW grant under the Initiative of Excellence—Research University program by the Ministry of Education and Science (PL), grant number 504/04496/1034/45.010003—1820/100/Z01/2023.</div></section><section class='html-notes'><h2 >Institutional Review Board Statement</h2><div class='html-p'>Not applicable.</div></section><section class='html-notes'><h2 >Informed Consent Statement</h2><div class='html-p'>Not applicable.</div></section><section class='html-notes'><h2 >Data Availability Statement</h2><div class='html-p'>Not applicable.</div></section><section class='html-notes'><h2 >Conflicts of Interest</h2><div class='html-p'>The authors declare no conflict of interest.</div></section> </div>
[<a href="https://scholar.google.com/scholar_lookup?title=GREIT:+A+unified+approach+to+2D+linear+EIT+reconstruction+of+lung+images&author=Adler,+A.&author=Arnold,+J.H.&author=Bayford,+R.&author=Borsic,+A.&author=Brown,+B.&author=Dixon,+P.&author=Faes,+T.J.C.&author=Frerichs,+I.&author=Gagnon,+H.&author=G%C3%A4rber,+Y.&publication_year=2009&journal=Physiol.+Meas.&volume=30&doi=10.1088/0967-3334/30/6/S03&pmid=19491438" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0967-3334/30/6/S03" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/19491438" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B4-sensors-23-07774' class='html-x' data-content='4.'>Sapuan, I.; Yasin, M.; Ain, K.; Apsari, R. Anomaly Detection Using Electric Impedance Tomography Based on Real and Imaginary Images. <span class='html-italic'>Sensors</span> <b>2020</b>, <span class='html-italic'>20</span>, 1907. [<a href="https://scholar.google.com/scholar_lookup?title=Anomaly+Detection+Using+Electric+Impedance+Tomography+Based+on+Real+and+Imaginary+Images&author=Sapuan,+I.&author=Yasin,+M.&author=Ain,+K.&author=Apsari,+R.&publication_year=2020&journal=Sensors&volume=20&pages=1907&doi=10.3390/s20071907&pmid=32235454" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/s20071907" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/32235454" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B5-sensors-23-07774' class='html-x' data-content='5.'>Peyman, A.; Gabriel, C.; Grant, E.H.; Vermeeren, G.; Martens, L. Variation of the dielectric properties of tissues with age: The effect on the values of SAR in children when exposed to walkie–talkie devices. <span class='html-italic'>Phys. Med. Biol.</span> <b>2008</b>, <span class='html-italic'>54</span>, 227. [<a href="https://scholar.google.com/scholar_lookup?title=Variation+of+the+dielectric+properties+of+tissues+with+age:+The+effect+on+the+values+of+SAR+in+children+when+exposed+to+walkie%E2%80%93talkie+devices&author=Peyman,+A.&author=Gabriel,+C.&author=Grant,+E.H.&author=Vermeeren,+G.&author=Martens,+L.&publication_year=2008&journal=Phys.+Med.+Biol.&volume=54&pages=227&doi=10.1088/0031-9155/54/2/004" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0031-9155/54/2/004" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B6-sensors-23-07774' class='html-x' data-content='6.'>Kay, C.F.; Schwan, H.P. Specific Resistance of Body Tissues. <span class='html-italic'>Circ. Res.</span> <b>1956</b>, <span class='html-italic'>4</span>, 664–670. 
[<a href="https://scholar.google.com/scholar_lookup?title=Specific+Resistance+of+Body+Tissues&author=Kay,+C.F.&author=Schwan,+H.P.&publication_year=1956&journal=Circ.+Res.&volume=4&pages=664%E2%80%93670&doi=10.1161/01.res.4.6.664&pmid=13365072" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1161/01.res.4.6.664" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/13365072" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B7-sensors-23-07774' class='html-x' data-content='7.'>Shi, Y.; Yang, Z.G.; Xie, F.; Ren, S.; Xu, S.F. The Research Progress of Electrical Impedance Tomography for Lung Monitoring. <span class='html-italic'>Front. Bioeng. Biotechnol.</span> <b>2021</b>, <span class='html-italic'>9</span>, 1–16. [<a href="https://scholar.google.com/scholar_lookup?title=The+Research+Progress+of+Electrical+Impedance+Tomography+for+Lung+Monitoring&author=Shi,+Y.&author=Yang,+Z.G.&author=Xie,+F.&author=Ren,+S.&author=Xu,+S.F.&publication_year=2021&journal=Front.+Bioeng.+Biotechnol.&volume=9&pages=1%E2%80%9316&doi=10.3389/fbioe.2021.726652" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3389/fbioe.2021.726652" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B8-sensors-23-07774' class='html-x' data-content='8.'>Christanto, A.; Darina, I. The Role of Electrical Impedance Tomography in Lung Imaging. <span class='html-italic'>Cermin Dunia Kedokt.</span> <b>2021</b>, <span class='html-italic'>48</span>, 372–375. [<a href="https://scholar.google.com/scholar_lookup?title=The+Role+of+Electrical+Impedance+Tomography+in+Lung+Imaging&author=Christanto,+A.&author=Darina,+I.&publication_year=2021&journal=Cermin+Dunia+Kedokt.&volume=48&pages=372%E2%80%93375&doi=10.55175/cdk.v48i9.132" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.55175/cdk.v48i9.132" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B9-sensors-23-07774' class='html-x' data-content='9.'>Qu, S.Y.; Dai, M.; Wu, S.; Lv, Z.R.; Ti, X.Y.; Fu, F. System Introduction and Evaluation of the First Chinese Chest EIT Device for ICU Applications. <span class='html-italic'>Sci. Rep.</span> <b>2021</b>, <span class='html-italic'>11</span>, 19273. [<a href="https://scholar.google.com/scholar_lookup?title=System+Introduction+and+Evaluation+of+the+First+Chinese+Chest+EIT+Device+for+ICU+Applications&author=Qu,+S.Y.&author=Dai,+M.&author=Wu,+S.&author=Lv,+Z.R.&author=Ti,+X.Y.&author=Fu,+F.&publication_year=2021&journal=Sci.+Rep.&volume=11&pages=19273&doi=10.1038/s41598-021-98793-0" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1038/s41598-021-98793-0" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B10-sensors-23-07774' class='html-xx' data-content='10.'>Adler, A.; Amato, M.B.; Arnold, J.H.; Bayford, R.; Bodenstein, M.; Böhm, S.H.; Brown, B.H.; Frerichs, I.; Stenqvist, O.; Weiler, N.; et al. Whither Lung EIT: Where Are We, Where Do We Want to Go and What Do We Need to Get There? <span class='html-italic'>Physiol. Meas.</span> <b>2012</b>, <span class='html-italic'>33</span>, 679–694. 
[<a href="https://scholar.google.com/scholar_lookup?title=Whither+Lung+EIT:+Where+Are+We,+Where+Do+We+Want+to+Go+and+What+Do+We+Need+to+Get+There?&author=Adler,+A.&author=Amato,+M.B.&author=Arnold,+J.H.&author=Bayford,+R.&author=Bodenstein,+M.&author=B%C3%B6hm,+S.H.&author=Brown,+B.H.&author=Frerichs,+I.&author=Stenqvist,+O.&author=Weiler,+N.&publication_year=2012&journal=Physiol.+Meas.&volume=33&pages=679%E2%80%93694&doi=10.1088/0967-3334/33/5/679" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0967-3334/33/5/679" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B11-sensors-23-07774' class='html-xx' data-content='11.'>Jiang, Y.D.; Soleimani, M. Capacitively Coupled Electrical Impedance Tomography for Brain Imaging. <span class='html-italic'>IEEE Trans. Med. Imaging</span> <b>2019</b>, <span class='html-italic'>38</span>, 2104–2113. [<a href="https://scholar.google.com/scholar_lookup?title=Capacitively+Coupled+Electrical+Impedance+Tomography+for+Brain+Imaging&author=Jiang,+Y.D.&author=Soleimani,+M.&publication_year=2019&journal=IEEE+Trans.+Med.+Imaging&volume=38&pages=2104%E2%80%932113&doi=10.1109/TMI.2019.2895035" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TMI.2019.2895035" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B12-sensors-23-07774' class='html-xx' data-content='12.'>Min Hyun, C.; Jun Jang, T.; Nam, J.; Kwon, H.; Jeon, K.; Lee, K. Machine Learning-Based Signal Quality Assessment for Cardiac Volume Monitoring in Electrical Impedance Tomography. <span class='html-italic'>Mach. Learn. Sci. Technol.</span> <b>2023</b>, <span class='html-italic'>4</span>, 015034. [<a href="https://scholar.google.com/scholar_lookup?title=Machine+Learning-Based+Signal+Quality+Assessment+for+Cardiac+Volume+Monitoring+in+Electrical+Impedance+Tomography&author=Min+Hyun,+C.&author=Jun+Jang,+T.&author=Nam,+J.&author=Kwon,+H.&author=Jeon,+K.&author=Lee,+K.&publication_year=2023&journal=Mach.+Learn.+Sci.+Technol.&volume=4&pages=015034&doi=10.1088/2632-2153/acc637" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/2632-2153/acc637" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B13-sensors-23-07774' class='html-xx' data-content='13.'>Huerta-Franco, M.R. Electrical Bioimpedance and Other Techniques for Gastric Emptying and Motility Evaluation. <span class='html-italic'>World J. Gastrointest. Pathophysiol.</span> <b>2012</b>, <span class='html-italic'>3</span>, 10. [<a href="https://scholar.google.com/scholar_lookup?title=Electrical+Bioimpedance+and+Other+Techniques+for+Gastric+Emptying+and+Motility+Evaluation&author=Huerta-Franco,+M.R.&publication_year=2012&journal=World+J.+Gastrointest.+Pathophysiol.&volume=3&pages=10&doi=10.4291/wjgp.v3.i1.10" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.4291/wjgp.v3.i1.10" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B14-sensors-23-07774' class='html-xx' data-content='14.'>Mansouri, S.; Alharbi, Y.; Haddad, F.; Chabcoub, S.; Alshrouf, A.; Abd-Elghany, A.A. Electrical Impedance Tomography—Recent Applications and Developments. <span class='html-italic'>J. Electr. Bioimpedance</span> <b>2021</b>, <span class='html-italic'>12</span>, 50–62. 
[<a href="https://scholar.google.com/scholar_lookup?title=Electrical+Impedance+Tomography%E2%80%94Recent+Applications+and+Developments&author=Mansouri,+S.&author=Alharbi,+Y.&author=Haddad,+F.&author=Chabcoub,+S.&author=Alshrouf,+A.&author=Abd-Elghany,+A.A.&publication_year=2021&journal=J.+Electr.+Bioimpedance&volume=12&pages=50%E2%80%9362&doi=10.2478/joeb-2021-0007&pmid=35069942" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.2478/joeb-2021-0007" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/35069942" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B15-sensors-23-07774' class='html-xx' data-content='15.'>Pennati, F.; Angelucci, A.; Morelli, L.; Bardini, S.; Barzanti, E.; Cavallini, F.; Conelli, A.; Di Federico, G.; Paganelli, C.; Aliverti, A. Electrical Impedance Tomography: From the Traditional Design to the Novel Frontier of Wearables. <span class='html-italic'>Sensors</span> <b>2023</b>, <span class='html-italic'>23</span>, 1182. [<a href="https://scholar.google.com/scholar_lookup?title=Electrical+Impedance+Tomography:+From+the+Traditional+Design+to+the+Novel+Frontier+of+Wearables&author=Pennati,+F.&author=Angelucci,+A.&author=Morelli,+L.&author=Bardini,+S.&author=Barzanti,+E.&author=Cavallini,+F.&author=Conelli,+A.&author=Di+Federico,+G.&author=Paganelli,+C.&author=Aliverti,+A.&publication_year=2023&journal=Sensors&volume=23&pages=1182&doi=10.3390/s23031182" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/s23031182" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B16-sensors-23-07774' class='html-xx' data-content='16.'>Wu, Y.; Jiang, D.; Bardill, A.; De Gelidi, S.; Bayford, R.; Demosthenous, A. A High Frame Rate Wearable EIT System Using Active Electrode ASICs for Lung Respiration and Heart Rate Monitoring. <span class='html-italic'>IEEE Trans. Circuits Syst. I Regul. Pap.</span> <b>2018</b>, <span class='html-italic'>65</span>, 3810–3820. [<a href="https://scholar.google.com/scholar_lookup?title=A+High+Frame+Rate+Wearable+EIT+System+Using+Active+Electrode+ASICs+for+Lung+Respiration+and+Heart+Rate+Monitoring&author=Wu,+Y.&author=Jiang,+D.&author=Bardill,+A.&author=De+Gelidi,+S.&author=Bayford,+R.&author=Demosthenous,+A.&publication_year=2018&journal=IEEE+Trans.+Circuits+Syst.+I+Regul.+Pap.&volume=65&pages=3810%E2%80%933820&doi=10.1109/TCSI.2018.2858148" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TCSI.2018.2858148" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B17-sensors-23-07774' class='html-xx' data-content='17.'>Isaacson, D.; Mueller, J.L.; Newell, J.C.; Siltanen, S. Reconstructions of Chest Phantoms by the D-Bar Method for Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Med. Imaging</span> <b>2004</b>, <span class='html-italic'>23</span>, 821–828. 
[<a href="https://scholar.google.com/scholar_lookup?title=Reconstructions+of+Chest+Phantoms+by+the+D-Bar+Method+for+Electrical+Impedance+Tomography&author=Isaacson,+D.&author=Mueller,+J.L.&author=Newell,+J.C.&author=Siltanen,+S.&publication_year=2004&journal=IEEE+Trans.+Med.+Imaging&volume=23&pages=821%E2%80%93828&doi=10.1109/TMI.2004.827482&pmid=15250634" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TMI.2004.827482" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/15250634" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B18-sensors-23-07774' class='html-xx' data-content='18.'>Knudsen, K.; Lassas, M.; Mueller, J.L.; Siltanen, S. D-Bar Method for Electrical Impedance Tomography with Discontinuous Conductivities. <span class='html-italic'>SIAM J. Appl. Math.</span> <b>2007</b>, <span class='html-italic'>67</span>, 893–913. [<a href="https://scholar.google.com/scholar_lookup?title=D-Bar+Method+for+Electrical+Impedance+Tomography+with+Discontinuous+Conductivities&author=Knudsen,+K.&author=Lassas,+M.&author=Mueller,+J.L.&author=Siltanen,+S.&publication_year=2007&journal=SIAM+J.+Appl.+Math.&volume=67&pages=893%E2%80%93913&doi=10.1137/060656930" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1137/060656930" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B19-sensors-23-07774' class='html-xx' data-content='19.'>Vauhkonen, M.; Vadâsz, D.; Karjalainen, P.A.; Somersalo, E.; Kaipio, J.P. Tikhonov Regularization and Prior Information in Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Med. Imaging</span> <b>1998</b>, <span class='html-italic'>17</span>, 285–293. [<a href="https://scholar.google.com/scholar_lookup?title=Tikhonov+Regularization+and+Prior+Information+in+Electrical+Impedance+Tomography&author=Vauhkonen,+M.&author=Vad%C3%A2sz,+D.&author=Karjalainen,+P.A.&author=Somersalo,+E.&author=Kaipio,+J.P.&publication_year=1998&journal=IEEE+Trans.+Med.+Imaging&volume=17&pages=285%E2%80%93293&doi=10.1109/42.700740" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/42.700740" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B20-sensors-23-07774' class='html-xx' data-content='20.'>Jiang, M.; Xia, L.; Shou, G.; Liu, F.; Crozier, S. Two Hybrid Regularization Frameworks for Solving the Electrocardiography Inverse Problem. <span class='html-italic'>Phys. Med. Biol.</span> <b>2008</b>, <span class='html-italic'>53</span>, 5151–5164. [<a href="https://scholar.google.com/scholar_lookup?title=Two+Hybrid+Regularization+Frameworks+for+Solving+the+Electrocardiography+Inverse+Problem&author=Jiang,+M.&author=Xia,+L.&author=Shou,+G.&author=Liu,+F.&author=Crozier,+S.&publication_year=2008&journal=Phys.+Med.+Biol.&volume=53&pages=5151%E2%80%935164&doi=10.1088/0031-9155/53/18/020" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0031-9155/53/18/020" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B21-sensors-23-07774' class='html-xx' data-content='21.'>Abascal, J.F.P.J.; Arridge, S.R.; Bayford, R.H.; Holder, D.S. 
Comparison of Methods for Optimal Choice of the Regularization Parameter for Linear Electrical Impedance Tomography of Brain Function. <span class='html-italic'>Physiol. Meas.</span> <b>2008</b>, <span class='html-italic'>29</span>, 1319–1334. [<a href="https://scholar.google.com/scholar_lookup?title=Comparison+of+Methods+for+Optimal+Choice+of+the+Regularization+Parameter+for+Linear+Electrical+Impedance+Tomography+of+Brain+Function&author=Abascal,+J.F.P.J.&author=Arridge,+S.R.&author=Bayford,+R.H.&author=Holder,+D.S.&publication_year=2008&journal=Physiol.+Meas.&volume=29&pages=1319%E2%80%931334&doi=10.1088/0967-3334/29/11/007&pmid=18854604" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0967-3334/29/11/007" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/18854604" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B22-sensors-23-07774' class='html-xx' data-content='22.'>Latourette, K. A Comparison of The Levenberg-Marquardt Method with Standard Optimization Algorithms, in Minimizing the Tikhonov-Total Variational Functional. In <span class='html-italic'>2.1. The Steepest Descent 4. Final Remarks Appendix A. Quadratic Convergence Rate of the Levenberg-Marquard</span>; University of Arizona: Tucson, AZ, USA, 2008. [<a href="https://scholar.google.com/scholar_lookup?title=A+Comparison+of+The+Levenberg-Marquardt+Method+with+Standard+Optimization+Algorithms,+in+Minimizing+the+Tikhonov-Total+Variational+Functional&author=Latourette,+K.&publication_year=2008" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>]</li><li id='B23-sensors-23-07774' class='html-xx' data-content='23.'>Colibazzi, F.; Lazzaro, D.; Morigi, S.; Samoré, A. Learning Nonlinear Electrical Impedance Tomography. <span class='html-italic'>J. Sci. Comput.</span> <b>2022</b>, <span class='html-italic'>90</span>, 58. [<a href="https://scholar.google.com/scholar_lookup?title=Learning+Nonlinear+Electrical+Impedance+Tomography&author=Colibazzi,+F.&author=Lazzaro,+D.&author=Morigi,+S.&author=Samor%C3%A9,+A.&publication_year=2022&journal=J.+Sci.+Comput.&volume=90&pages=58&doi=10.1007/s10915-021-01716-4" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1007/s10915-021-01716-4" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B24-sensors-23-07774' class='html-xx' data-content='24.'>Gamio, J.C.; Ortiz-Aleman, C.; Martin, R. Electrical Capacitance Tomography Two-Phase Oil-Gas Pipe Flow Imaging by the Linear Back-Projection Algorithm. <span class='html-italic'>Geofísic. Int.</span> <b>2005</b>, <span class='html-italic'>44</span>, 265–273. [<a href="https://scholar.google.com/scholar_lookup?title=Electrical+Capacitance+Tomography+Two-Phase+Oil-Gas+Pipe+Flow+Imaging+by+the+Linear+Back-Projection+Algorithm&author=Gamio,+J.C.&author=Ortiz-Aleman,+C.&author=Martin,+R.&publication_year=2005&journal=Geof%C3%ADsic.+Int.&volume=44&pages=265%E2%80%93273" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>]</li><li id='B25-sensors-23-07774' class='html-xx' data-content='25.'>Smolik, W.T. <span class='html-italic'>Rekonstrukcja Obrazów w Elektrycznej Tomografii Pojemnościowej</span>; Oficyna Wydawnicza Politechniki Warszawskiej: Warszawa, Poland, 2013; ISBN 978-83-7814-097-9. 
[<a href="https://scholar.google.com/scholar_lookup?title=Rekonstrukcja+Obraz%C3%B3w+w+Elektrycznej+Tomografii+Pojemno%C5%9Bciowej&author=Smolik,+W.T.&publication_year=2013" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>]</li><li id='B26-sensors-23-07774' class='html-xx' data-content='26.'>Aller, M.; Mera, D.; Cotos, J.M.; Villaroya, S. Study and Comparison of Different Machine Learning-Based Approaches to Solve the Inverse Problem in Electrical Impedance Tomographies. <span class='html-italic'>Neural Comput. Appl.</span> <b>2023</b>, <span class='html-italic'>35</span>, 5465–5477. [<a href="https://scholar.google.com/scholar_lookup?title=Study+and+Comparison+of+Different+Machine+Learning-Based+Approaches+to+Solve+the+Inverse+Problem+in+Electrical+Impedance+Tomographies&author=Aller,+M.&author=Mera,+D.&author=Cotos,+J.M.&author=Villaroya,+S.&publication_year=2023&journal=Neural+Comput.+Appl.&volume=35&pages=5465%E2%80%935477&doi=10.1007/s00521-022-07988-7" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1007/s00521-022-07988-7" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B27-sensors-23-07774' class='html-xx' data-content='27.'>Wang, G.; Ye, J.C.; Mueller, K.; Fessler, J.A. Image Reconstruction Is a New Frontier of Machine Learning. <span class='html-italic'>IEEE Trans. Med. Imaging</span> <b>2018</b>, <span class='html-italic'>37</span>, 1289–1296. [<a href="https://scholar.google.com/scholar_lookup?title=Image+Reconstruction+Is+a+New+Frontier+of+Machine+Learning&author=Wang,+G.&author=Ye,+J.C.&author=Mueller,+K.&author=Fessler,+J.A.&publication_year=2018&journal=IEEE+Trans.+Med.+Imaging&volume=37&pages=1289%E2%80%931296&doi=10.1109/TMI.2018.2833635" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TMI.2018.2833635" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B28-sensors-23-07774' class='html-xx' data-content='28.'>Ongie, G.; Jalal, A.; Metzler, C.A.; Baraniuk, R.G.; Dimakis, A.G.; Willett, R. Deep Learning Techniques for Inverse Problems in Imaging. <span class='html-italic'>IEEE J. Sel. Areas Inf. Theory</span> <b>2020</b>, <span class='html-italic'>1</span>, 39–56. [<a href="https://scholar.google.com/scholar_lookup?title=Deep+Learning+Techniques+for+Inverse+Problems+in+Imaging&author=Ongie,+G.&author=Jalal,+A.&author=Metzler,+C.A.&author=Baraniuk,+R.G.&author=Dimakis,+A.G.&author=Willett,+R.&publication_year=2020&journal=IEEE+J.+Sel.+Areas+Inf.+Theory&volume=1&pages=39%E2%80%9356&doi=10.1109/JSAIT.2020.2991563" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/JSAIT.2020.2991563" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B29-sensors-23-07774' class='html-xx' data-content='29.'>Adler, A.; Guardo, R. A Neural Network Image Reconstruction Technique for Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Med. Imaging</span> <b>1994</b>, <span class='html-italic'>13</span>, 594–600. 
[<a href="https://scholar.google.com/scholar_lookup?title=A+Neural+Network+Image+Reconstruction+Technique+for+Electrical+Impedance+Tomography&author=Adler,+A.&author=Guardo,+R.&publication_year=1994&journal=IEEE+Trans.+Med.+Imaging&volume=13&pages=594%E2%80%93600&doi=10.1109/42.363109" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/42.363109" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B30-sensors-23-07774' class='html-xx' data-content='30.'>Adler, J.; Öktem, O. Solving Ill-Posed Inverse Problems Using Iterative Deep Neural Networks. <span class='html-italic'>Inverse Probl.</span> <b>2017</b>, <span class='html-italic'>33</span>, 124007. [<a href="https://scholar.google.com/scholar_lookup?title=Solving+Ill-Posed+Inverse+Problems+Using+Iterative+Deep+Neural+Networks&author=Adler,+J.&author=%C3%96ktem,+O.&publication_year=2017&journal=Inverse+Probl.&volume=33&pages=124007&doi=10.1088/1361-6420/aa9581" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/1361-6420/aa9581" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B31-sensors-23-07774' class='html-xx' data-content='31.'>Khan, T.A.; Ling, S.H. Review on Electrical Impedance Tomography: Artificial Intelligence Methods and Its Applications. <span class='html-italic'>Algorithms</span> <b>2019</b>, <span class='html-italic'>12</span>, 88. [<a href="https://scholar.google.com/scholar_lookup?title=Review+on+Electrical+Impedance+Tomography:+Artificial+Intelligence+Methods+and+Its+Applications&author=Khan,+T.A.&author=Ling,+S.H.&publication_year=2019&journal=Algorithms&volume=12&pages=88&doi=10.3390/a12050088" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/a12050088" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B32-sensors-23-07774' class='html-xx' data-content='32.'>Zhang, T.; Tian, X.; Liu, X.C.; Ye, J.A.; Fu, F.; Shi, X.T.; Liu, R.G.; Xu, C.H. Advances of Deep Learning in Electrical Impedance Tomography Image Reconstruction. <span class='html-italic'>Front. Bioeng. Biotechnol.</span> <b>2022</b>, <span class='html-italic'>10</span>, 1019531. [<a href="https://scholar.google.com/scholar_lookup?title=Advances+of+Deep+Learning+in+Electrical+Impedance+Tomography+Image+Reconstruction&author=Zhang,+T.&author=Tian,+X.&author=Liu,+X.C.&author=Ye,+J.A.&author=Fu,+F.&author=Shi,+X.T.&author=Liu,+R.G.&author=Xu,+C.H.&publication_year=2022&journal=Front.+Bioeng.+Biotechnol.&volume=10&pages=1019531&doi=10.3389/fbioe.2022.1019531" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3389/fbioe.2022.1019531" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B33-sensors-23-07774' class='html-xx' data-content='33.'>Yang, X.; Zhao, C.; Chen, B.; Zhang, M.; Li, Y. Big Data Driven U-Net Based Electrical Capacitance Image Reconstruction Algorithm. In Proceedings of the 2019 IEEE International Conference on Imaging Systems and Techniques (IST), Abu Dhabi, United Arab, 9–10 December 2019. 
[<a href="https://scholar.google.com/scholar_lookup?title=Big+Data+Driven+U-Net+Based+Electrical+Capacitance+Image+Reconstruction+Algorithm&conference=Proceedings+of+the+2019+IEEE+International+Conference+on+Imaging+Systems+and+Techniques+(IST)&author=Yang,+X.&author=Zhao,+C.&author=Chen,+B.&author=Zhang,+M.&author=Li,+Y.&publication_year=2019&doi=10.1109/IST48021.2019.9010423" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/IST48021.2019.9010423" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B34-sensors-23-07774' class='html-xx' data-content='34.'>Genzel, M.; Macdonald, J.; Marz, M. Solving Inverse Problems With Deep Neural Networks—Robustness Included. <span class='html-italic'>IEEE Trans. Pattern Anal. Mach. Intell.</span> <b>2022</b>, <span class='html-italic'>8828</span>, 1119–1134. [<a href="https://scholar.google.com/scholar_lookup?title=Solving+Inverse+Problems+With+Deep+Neural+Networks%E2%80%94Robustness+Included&author=Genzel,+M.&author=Macdonald,+J.&author=Marz,+M.&publication_year=2022&journal=IEEE+Trans.+Pattern+Anal.+Mach.+Intell.&volume=8828&pages=1119%E2%80%931134&doi=10.1109/TPAMI.2022.3148324" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TPAMI.2022.3148324" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B35-sensors-23-07774' class='html-xx' data-content='35.'>Kłosowski, G.; Rymarczyk, T.; Cieplak, T.; Niderla, K.; Skowron, Ł. Quality Assessment of the Neural Algorithms on the Example of EIT-UST Hybrid Tomography. <span class='html-italic'>Sensors</span> <b>2020</b>, <span class='html-italic'>20</span>, 3324. [<a href="https://scholar.google.com/scholar_lookup?title=Quality+Assessment+of+the+Neural+Algorithms+on+the+Example+of+EIT-UST+Hybrid+Tomography&author=K%C5%82osowski,+G.&author=Rymarczyk,+T.&author=Cieplak,+T.&author=Niderla,+K.&author=Skowron,+%C5%81.&publication_year=2020&journal=Sensors&volume=20&pages=3324&doi=10.3390/s20113324&pmid=32545221" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/s20113324" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/32545221" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B36-sensors-23-07774' class='html-xx' data-content='36.'>Rymarczyk, T.; Klosowski, G.; Kozlowski, E.; Tchórzewski, P. Comparison of Selected Machine Learning Algorithms for Industrial Electrical Tomography. <span class='html-italic'>Sensors</span> <b>2019</b>, <span class='html-italic'>19</span>, 1521. 
[<a href="https://scholar.google.com/scholar_lookup?title=Comparison+of+Selected+Machine+Learning+Algorithms+for+Industrial+Electrical+Tomography&author=Rymarczyk,+T.&author=Klosowski,+G.&author=Kozlowski,+E.&author=Tch%C3%B3rzewski,+P.&publication_year=2019&journal=Sensors&volume=19&pages=1521&doi=10.3390/s19071521&pmid=30925825" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/s19071521" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/30925825" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B37-sensors-23-07774' class='html-xx' data-content='37.'>Marashdeh, Q.; Warsito, W.; Fan, L.S.; Teixeira, F.L. Nonlinear Forward Problem Solution for Electrical Capacitance Tomography Using Feed-Forward Neural Network. <span class='html-italic'>IEEE Sens. J.</span> <b>2006</b>, <span class='html-italic'>6</span>, 441–448. [<a href="https://scholar.google.com/scholar_lookup?title=Nonlinear+Forward+Problem+Solution+for+Electrical+Capacitance+Tomography+Using+Feed-Forward+Neural+Network&author=Marashdeh,+Q.&author=Warsito,+W.&author=Fan,+L.S.&author=Teixeira,+F.L.&publication_year=2006&journal=IEEE+Sens.+J.&volume=6&pages=441%E2%80%93448&doi=10.1109/JSEN.2005.860316" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/JSEN.2005.860316" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B38-sensors-23-07774' class='html-xx' data-content='38.'>Marashdeh, Q.; Warsito, W.; Fan, L.S.; Teixeira, F.L. A Nonlinear Image Reconstruction Technique for ECT Using a Combined Neural Network Approach. <span class='html-italic'>Meas. Sci. Technol.</span> <b>2006</b>, <span class='html-italic'>17</span>, 2097–2103. [<a href="https://scholar.google.com/scholar_lookup?title=A+Nonlinear+Image+Reconstruction+Technique+for+ECT+Using+a+Combined+Neural+Network+Approach&author=Marashdeh,+Q.&author=Warsito,+W.&author=Fan,+L.S.&author=Teixeira,+F.L.&publication_year=2006&journal=Meas.+Sci.+Technol.&volume=17&pages=2097%E2%80%932103&doi=10.1088/0957-0233/17/8/007" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0957-0233/17/8/007" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B39-sensors-23-07774' class='html-xx' data-content='39.'>Zhang, X.; Chen, X.; Wang, Z.; Zhang, M. EIT-4LDNN: A Novel Neural Network for Electrical Impedance Tomography. <span class='html-italic'>J. Phys. Conf. Ser.</span> <b>2021</b>, <span class='html-italic'>1757</span>, 012013. [<a href="https://scholar.google.com/scholar_lookup?title=EIT-4LDNN:+A+Novel+Neural+Network+for+Electrical+Impedance+Tomography&author=Zhang,+X.&author=Chen,+X.&author=Wang,+Z.&author=Zhang,+M.&publication_year=2021&journal=J.+Phys.+Conf.+Ser.&volume=1757&pages=012013&doi=10.1088/1742-6596/1757/1/012013" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/1742-6596/1757/1/012013" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B40-sensors-23-07774' class='html-xx' data-content='40.'>Fu, R.; Wang, Z.; Zhang, X.; Wang, D.; Chen, X.; Wang, H. A Regularization-Guided Deep Imaging Method for Electrical Impedance Tomography. <span class='html-italic'>IEEE Sens. 
J.</span> <b>2022</b>, <span class='html-italic'>22</span>, 8760–8771. [<a href="https://scholar.google.com/scholar_lookup?title=A+Regularization-Guided+Deep+Imaging+Method+for+Electrical+Impedance+Tomography&author=Fu,+R.&author=Wang,+Z.&author=Zhang,+X.&author=Wang,+D.&author=Chen,+X.&author=Wang,+H.&publication_year=2022&journal=IEEE+Sens.+J.&volume=22&pages=8760%E2%80%938771&doi=10.1109/JSEN.2022.3161025" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/JSEN.2022.3161025" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B41-sensors-23-07774' class='html-xx' data-content='41.'>Coxson, A.; Mihov, I.; Wang, Z.; Avramov, V.; Barnes, F.B.; Slizovskiy, S.; Mullan, C.; Timokhin, I.; Sanderson, D.; Kretinin, A.; et al. Machine Learning Enhanced Electrical Impedance Tomography for 2D Materials. <span class='html-italic'>Inverse Probl.</span> <b>2022</b>, <span class='html-italic'>38</span>, 085007. [<a href="https://scholar.google.com/scholar_lookup?title=Machine+Learning+Enhanced+Electrical+Impedance+Tomography+for+2D+Materials&author=Coxson,+A.&author=Mihov,+I.&author=Wang,+Z.&author=Avramov,+V.&author=Barnes,+F.B.&author=Slizovskiy,+S.&author=Mullan,+C.&author=Timokhin,+I.&author=Sanderson,+D.&author=Kretinin,+A.&publication_year=2022&journal=Inverse+Probl.&volume=38&pages=085007&doi=10.1088/1361-6420/ac7743" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/1361-6420/ac7743" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B42-sensors-23-07774' class='html-xx' data-content='42.'>Deabes, W.; Abdel-Hakim, A.E.; Bouazza, K.E.; Althobaiti, H. Adversarial Resolution Enhancement for Electrical Capacitance Tomography Image Reconstruction. <span class='html-italic'>Sensors</span> <b>2022</b>, <span class='html-italic'>22</span>, 3142. [<a href="https://scholar.google.com/scholar_lookup?title=Adversarial+Resolution+Enhancement+for+Electrical+Capacitance+Tomography+Image+Reconstruction&author=Deabes,+W.&author=Abdel-Hakim,+A.E.&author=Bouazza,+K.E.&author=Althobaiti,+H.&publication_year=2022&journal=Sensors&volume=22&pages=3142&doi=10.3390/s22093142" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/s22093142" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B43-sensors-23-07774' class='html-xx' data-content='43.'>Zhang, H.; Wang, Q.; Zhang, R.; Li, X.; Duan, X.; Sun, Y.; Wang, J.; Jia, J. Image Reconstruction for Electrical Impedance Tomography (EIT) With Improved Wasserstein Generative Adversarial Network (WGAN). <span class='html-italic'>IEEE Sens. J.</span> <b>2023</b>, <span class='html-italic'>23</span>, 4466–4475. 
[<a href="https://scholar.google.com/scholar_lookup?title=Image+Reconstruction+for+Electrical+Impedance+Tomography+(EIT)+With+Improved+Wasserstein+Generative+Adversarial+Network+(WGAN)&author=Zhang,+H.&author=Wang,+Q.&author=Zhang,+R.&author=Li,+X.&author=Duan,+X.&author=Sun,+Y.&author=Wang,+J.&author=Jia,+J.&publication_year=2023&journal=IEEE+Sens.+J.&volume=23&pages=4466%E2%80%934475&doi=10.1109/JSEN.2022.3197663" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/JSEN.2022.3197663" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B44-sensors-23-07774' class='html-xx' data-content='44.'>Vu, M.T.; Jardani, A. Convolutional Neural Networks with SegNet Architecture Applied to Three-Dimensional Tomography of Subsurface Electrical Resistivity: CNN-3D-ERT. <span class='html-italic'>Geophys. J. Int.</span> <b>2021</b>, <span class='html-italic'>225</span>, 1319–1331. [<a href="https://scholar.google.com/scholar_lookup?title=Convolutional+Neural+Networks+with+SegNet+Architecture+Applied+to+Three-Dimensional+Tomography+of+Subsurface+Electrical+Resistivity:+CNN-3D-ERT&author=Vu,+M.T.&author=Jardani,+A.&publication_year=2021&journal=Geophys.+J.+Int.&volume=225&pages=1319%E2%80%931331&doi=10.1093/gji/ggab024" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1093/gji/ggab024" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B45-sensors-23-07774' class='html-xx' data-content='45.'>Chen, Z.; Ma, G.; Jiang, Y.; Wang, B.; Soleimani, M. Application of Deep Neural Network to the Reconstruction of Two-Phase Material Imaging by Capacitively Coupled Electrical Resistance Tomography. <span class='html-italic'>Electronics</span> <b>2021</b>, <span class='html-italic'>10</span>, 1058. [<a href="https://scholar.google.com/scholar_lookup?title=Application+of+Deep+Neural+Network+to+the+Reconstruction+of+Two-Phase+Material+Imaging+by+Capacitively+Coupled+Electrical+Resistance+Tomography&author=Chen,+Z.&author=Ma,+G.&author=Jiang,+Y.&author=Wang,+B.&author=Soleimani,+M.&publication_year=2021&journal=Electronics&volume=10&pages=1058&doi=10.3390/electronics10091058" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/electronics10091058" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B46-sensors-23-07774' class='html-xx' data-content='46.'>Fernández-Fuentes, X.; Mera, D.; Gómez, A.; Vidal-Franco, I. Towards a Fast and Accurate EIT Inverse Problem Solver: A Machine Learning Approach. <span class='html-italic'>Electronics</span> <b>2018</b>, <span class='html-italic'>7</span>, 422. [<a href="https://scholar.google.com/scholar_lookup?title=Towards+a+Fast+and+Accurate+EIT+Inverse+Problem+Solver:+A+Machine+Learning+Approach&author=Fern%C3%A1ndez-Fuentes,+X.&author=Mera,+D.&author=G%C3%B3mez,+A.&author=Vidal-Franco,+I.&publication_year=2018&journal=Electronics&volume=7&pages=422&doi=10.3390/electronics7120422" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/electronics7120422" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B47-sensors-23-07774' class='html-xx' data-content='47.'>Ren, S.; Guan, R.; Liang, G.; Dong, F. 
RCRC: A Deep Neural Network for Dynamic Image Reconstruction of Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Instrum. Meas.</span> <b>2021</b>, <span class='html-italic'>70</span>, 4505311. [<a href="https://scholar.google.com/scholar_lookup?title=RCRC:+A+Deep+Neural+Network+for+Dynamic+Image+Reconstruction+of+Electrical+Impedance+Tomography&author=Ren,+S.&author=Guan,+R.&author=Liang,+G.&author=Dong,+F.&publication_year=2021&journal=IEEE+Trans.+Instrum.+Meas.&volume=70&pages=4505311&doi=10.1109/TIM.2021.3092061" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TIM.2021.3092061" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B48-sensors-23-07774' class='html-xx' data-content='48.'>Cheng, Y.; Fan, W. R-UNet Deep Learning-Based Damage Detection of CFRP With Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Instrum. Meas.</span> <b>2022</b>, <span class='html-italic'>71</span>, 4506308. [<a href="https://scholar.google.com/scholar_lookup?title=R-UNet+Deep+Learning-Based+Damage+Detection+of+CFRP+With+Electrical+Impedance+Tomography&author=Cheng,+Y.&author=Fan,+W.&publication_year=2022&journal=IEEE+Trans.+Instrum.+Meas.&volume=71&pages=4506308&doi=10.1109/TIM.2022.3205908" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TIM.2022.3205908" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B49-sensors-23-07774' class='html-xx' data-content='49.'>Zhu, L.; Lu, W.; Soleimani, M.; Li, Z.; Zhang, M. Electrical Impedance Tomography Guided by Digital Twins and Deep Learning for Lung Monitoring. <span class='html-italic'>IEEE Trans. Instrum. Meas.</span> <b>2023</b>, <span class='html-italic'>72</span>, 4009309. [<a href="https://scholar.google.com/scholar_lookup?title=Electrical+Impedance+Tomography+Guided+by+Digital+Twins+and+Deep+Learning+for+Lung+Monitoring&author=Zhu,+L.&author=Lu,+W.&author=Soleimani,+M.&author=Li,+Z.&author=Zhang,+M.&publication_year=2023&journal=IEEE+Trans.+Instrum.+Meas.&volume=72&pages=4009309&doi=10.1109/TIM.2023.3298389" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TIM.2023.3298389" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B50-sensors-23-07774' class='html-xx' data-content='50.'>Liu, D.; Wang, J.; Shan, Q.; Smyl, D.; Deng, J.; Du, J. DeepEIT: Deep Image Prior Enabled Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Pattern Anal. Mach. Intell.</span> <b>2023</b>, <span class='html-italic'>45</span>, 9627–9638. 
[<a href="https://scholar.google.com/scholar_lookup?title=DeepEIT:+Deep+Image+Prior+Enabled+Electrical+Impedance+Tomography&author=Liu,+D.&author=Wang,+J.&author=Shan,+Q.&author=Smyl,+D.&author=Deng,+J.&author=Du,+J.&publication_year=2023&journal=IEEE+Trans.+Pattern+Anal.+Mach.+Intell.&volume=45&pages=9627%E2%80%939638&doi=10.1109/TPAMI.2023.3240565&pmid=37022376" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TPAMI.2023.3240565" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/37022376" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B51-sensors-23-07774' class='html-xx' data-content='51.'>Li, X.; Zhang, R.; Wang, Q.; Duan, X.; Sun, Y.; Wang, J. SAR-CGAN: Improved Generative Adversarial Network for EIT Reconstruction of Lung Diseases. <span class='html-italic'>Biomed. Signal Process. Control</span> <b>2023</b>, <span class='html-italic'>81</span>, 104421. [<a href="https://scholar.google.com/scholar_lookup?title=SAR-CGAN:+Improved+Generative+Adversarial+Network+for+EIT+Reconstruction+of+Lung+Diseases&author=Li,+X.&author=Zhang,+R.&author=Wang,+Q.&author=Duan,+X.&author=Sun,+Y.&author=Wang,+J.&publication_year=2023&journal=Biomed.+Signal+Process.+Control&volume=81&pages=104421&doi=10.1016/j.bspc.2022.104421" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1016/j.bspc.2022.104421" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B52-sensors-23-07774' class='html-xx' data-content='52.'>Tian, X.; Liu, X.; Zhang, T.; Ye, J.; Zhang, W.; Zhang, L.; Shi, X.; Fu, F.; Li, Z.; Xu, C. Effective Electrical Impedance Tomography Based on Enhanced Encoder-Decoder Using Atrous Spatial Pyramid Pooling Module. <span class='html-italic'>IEEE J. Biomed. Health Inform.</span> <b>2023</b>, <span class='html-italic'>27</span>, 3282–3291. [<a href="https://scholar.google.com/scholar_lookup?title=Effective+Electrical+Impedance+Tomography+Based+on+Enhanced+Encoder-Decoder+Using+Atrous+Spatial+Pyramid+Pooling+Module&author=Tian,+X.&author=Liu,+X.&author=Zhang,+T.&author=Ye,+J.&author=Zhang,+W.&author=Zhang,+L.&author=Shi,+X.&author=Fu,+F.&author=Li,+Z.&author=Xu,+C.&publication_year=2023&journal=IEEE+J.+Biomed.+Health+Inform.&volume=27&pages=3282%E2%80%933291&doi=10.1109/JBHI.2023.3265385" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/JBHI.2023.3265385" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B53-sensors-23-07774' class='html-xx' data-content='53.'>Herzberg, W.; Rowe, D.B.; Hauptmann, A.; Hamilton, S.J. Graph Convolutional Networks for Model-Based Learning in Nonlinear Inverse Problems. <span class='html-italic'>IEEE Trans. Comput. Imaging</span> <b>2021</b>, <span class='html-italic'>7</span>, 1341–1353. 
[<a href="https://scholar.google.com/scholar_lookup?title=Graph+Convolutional+Networks+for+Model-Based+Learning+in+Nonlinear+Inverse+Problems&author=Herzberg,+W.&author=Rowe,+D.B.&author=Hauptmann,+A.&author=Hamilton,+S.J.&publication_year=2021&journal=IEEE+Trans.+Comput.+Imaging&volume=7&pages=1341%E2%80%931353&doi=10.1109/TCI.2021.3132190" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TCI.2021.3132190" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B54-sensors-23-07774' class='html-xx' data-content='54.'>Minhas, A.S.; Reddy, M.R. Neural Network Based Approach for Anomaly Detection in the Lungs Region by Electrical Impedance Tomography. <span class='html-italic'>Physiol. Meas.</span> <b>2005</b>, <span class='html-italic'>26</span>, 489. [<a href="https://scholar.google.com/scholar_lookup?title=Neural+Network+Based+Approach+for+Anomaly+Detection+in+the+Lungs+Region+by+Electrical+Impedance+Tomography&author=Minhas,+A.S.&author=Reddy,+M.R.&publication_year=2005&journal=Physiol.+Meas.&volume=26&pages=489&doi=10.1088/0967-3334/26/4/014" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0967-3334/26/4/014" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B55-sensors-23-07774' class='html-xx' data-content='55.'>Becher, T.; Bußmeyer, M.; Lautenschläger, I.; Schädler, D.; Weiler, N.; Frerichs, I. Characteristic Pattern of Pleural Effusion in Electrical Impedance Tomography Images of Critically Ill Patients. <span class='html-italic'>Br. J. Anaesth.</span> <b>2018</b>, <span class='html-italic'>120</span>, 1219–1228. [<a href="https://scholar.google.com/scholar_lookup?title=Characteristic+Pattern+of+Pleural+Effusion+in+Electrical+Impedance+Tomography+Images+of+Critically+Ill+Patients&author=Becher,+T.&author=Bu%C3%9Fmeyer,+M.&author=Lautenschl%C3%A4ger,+I.&author=Sch%C3%A4dler,+D.&author=Weiler,+N.&author=Frerichs,+I.&publication_year=2018&journal=Br.+J.+Anaesth.&volume=120&pages=1219%E2%80%931228&doi=10.1016/j.bja.2018.02.030" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1016/j.bja.2018.02.030" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B56-sensors-23-07774' class='html-xx' data-content='56.'>Hamilton, S.J.; Mueller, J.L.; Alsaker, M. Incorporating a Spatial Prior into Nonlinear D-Bar EIT Imaging for Complex Admittivities. <span class='html-italic'>IEEE Trans. Med. Imaging</span> <b>2017</b>, <span class='html-italic'>36</span>, 457–466. [<a href="https://scholar.google.com/scholar_lookup?title=Incorporating+a+Spatial+Prior+into+Nonlinear+D-Bar+EIT+Imaging+for+Complex+Admittivities&author=Hamilton,+S.J.&author=Mueller,+J.L.&author=Alsaker,+M.&publication_year=2017&journal=IEEE+Trans.+Med.+Imaging&volume=36&pages=457%E2%80%93466&doi=10.1109/TMI.2016.2613511&pmid=28114061" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TMI.2016.2613511" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/28114061" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B57-sensors-23-07774' class='html-xx' data-content='57.'>Wei, Z.; Liu, D.; Chen, X. 
Dominant-Current Deep Learning Scheme for Electrical Impedance Tomography. <span class='html-italic'>IEEE Trans. Biomed. Eng.</span> <b>2019</b>, <span class='html-italic'>66</span>, 2546–2555. [<a href="https://scholar.google.com/scholar_lookup?title=Dominant-Current+Deep+Learning+Scheme+for+Electrical+Impedance+Tomography&author=Wei,+Z.&author=Liu,+D.&author=Chen,+X.&publication_year=2019&journal=IEEE+Trans.+Biomed.+Eng.&volume=66&pages=2546%E2%80%932555&doi=10.1109/TBME.2019.2891676&pmid=30629486" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TBME.2019.2891676" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>] [<a href="https://www.ncbi.nlm.nih.gov/pubmed/30629486" class='cross-ref' data-typ='pmid' target='_blank' rel='noopener noreferrer'>PubMed</a>]</li><li id='B58-sensors-23-07774' class='html-xx' data-content='58.'>Shin, K.; Mueller, J.L. Calderón’s Method with a Spatial Prior for 2-d Eit Imaging of Ventilation and Perfusion. <span class='html-italic'>Sensors</span> <b>2021</b>, <span class='html-italic'>21</span>, 5635. [<a href="https://scholar.google.com/scholar_lookup?title=Calder%C3%B3n%E2%80%99s+Method+with+a+Spatial+Prior+for+2-d+Eit+Imaging+of+Ventilation+and+Perfusion&author=Shin,+K.&author=Mueller,+J.L.&publication_year=2021&journal=Sensors&volume=21&pages=5635&doi=10.3390/s21165635" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/s21165635" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B59-sensors-23-07774' class='html-xx' data-content='59.'>Schullcke, B.; Gong, B.; Krueger-Ziolek, S.; Moeller, K. Reconstruction of Conductivity Change in Lung Lobes Utilizing Electrical Impedance Tomography. <span class='html-italic'>Curr. Dir. Biomed. Eng.</span> <b>2017</b>, <span class='html-italic'>3</span>, 513–516. [<a href="https://scholar.google.com/scholar_lookup?title=Reconstruction+of+Conductivity+Change+in+Lung+Lobes+Utilizing+Electrical+Impedance+Tomography&author=Schullcke,+B.&author=Gong,+B.&author=Krueger-Ziolek,+S.&author=Moeller,+K.&publication_year=2017&journal=Curr.+Dir.+Biomed.+Eng.&volume=3&pages=513%E2%80%93516&doi=10.1515/cdbme-2017-0108" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1515/cdbme-2017-0108" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B60-sensors-23-07774' class='html-xx' data-content='60.'>Wanta, D.; Makowiecka, O.; Smolik, W.T.; Kryszyn, J.; Domański, G.; Midura, M.; Wróblewski, P. Numerical Evaluation of Complex Capacitance Measurement Using Pulse Excitation in Electrical Capacitance Tomography. <span class='html-italic'>Electronics</span> <b>2022</b>, <span class='html-italic'>11</span>, 1864. 
[<a href="https://scholar.google.com/scholar_lookup?title=Numerical+Evaluation+of+Complex+Capacitance+Measurement+Using+Pulse+Excitation+in+Electrical+Capacitance+Tomography&author=Wanta,+D.&author=Makowiecka,+O.&author=Smolik,+W.T.&author=Kryszyn,+J.&author=Doma%C5%84ski,+G.&author=Midura,+M.&author=Wr%C3%B3blewski,+P.&publication_year=2022&journal=Electronics&volume=11&pages=1864&doi=10.3390/electronics11121864" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.3390/electronics11121864" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B61-sensors-23-07774' class='html-xx' data-content='61.'>Kryszyn, J.; Smolik, W. 2D Modelling of a Sensor for Electrical Capacitance Tomography in Ectsim Toolbox. <span class='html-italic'>Inform. Control. Meas. Econ. Environ. Prot.</span> <b>2017</b>, <span class='html-italic'>7</span>, 146–149. [<a href="https://scholar.google.com/scholar_lookup?title=2D+Modelling+of+a+Sensor+for+Electrical+Capacitance+Tomography+in+Ectsim+Toolbox&author=Kryszyn,+J.&author=Smolik,+W.&publication_year=2017&journal=Inform.+Control.+Meas.+Econ.+Environ.+Prot.&volume=7&pages=146%E2%80%93149&doi=10.5604/01.3001.0010.4604" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.5604/01.3001.0010.4604" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B62-sensors-23-07774' class='html-xx' data-content='62.'>Wanta, D.; Smolik, W.T.; Kryszyn, J.; Wróblewski, P.; Midura, M. A Finite Volume Method Using a Quadtree Non-Uniform Structured Mesh for Modeling in Electrical Capacitance Tomography. <span class='html-italic'>Proc. Natl. Acad. Sci. India Sect. A Phys. Sci.</span> <b>2022</b>, <span class='html-italic'>92</span>, 443–452. [<a href="https://scholar.google.com/scholar_lookup?title=A+Finite+Volume+Method+Using+a+Quadtree+Non-Uniform+Structured+Mesh+for+Modeling+in+Electrical+Capacitance+Tomography&author=Wanta,+D.&author=Smolik,+W.T.&author=Kryszyn,+J.&author=Wr%C3%B3blewski,+P.&author=Midura,+M.&publication_year=2022&journal=Proc.+Natl.+Acad.+Sci.+India+Sect.+A+Phys.+Sci.&volume=92&pages=443%E2%80%93452&doi=10.1007/s40010-021-00748-7" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1007/s40010-021-00748-7" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B63-sensors-23-07774' class='html-xx' data-content='63.'>Imran, A.A.Z.; Hatamizadeh, A.; Ananth, S.P.; Ding, X.; Tajbakhsh, N.; Terzopoulos, D. Fast and Automatic Segmentation of Pulmonary Lobes from Chest CT Using a Progressive Dense V-Network. <span class='html-italic'>Comput. Methods Biomech. Biomed. Eng. Imaging Vis.</span> <b>2020</b>, <span class='html-italic'>8</span>, 509–518. 
[<a href="https://scholar.google.com/scholar_lookup?title=Fast+and+Automatic+Segmentation+of+Pulmonary+Lobes+from+Chest+CT+Using+a+Progressive+Dense+V-Network&author=Imran,+A.A.Z.&author=Hatamizadeh,+A.&author=Ananth,+S.P.&author=Ding,+X.&author=Tajbakhsh,+N.&author=Terzopoulos,+D.&publication_year=2020&journal=Comput.+Methods+Biomech.+Biomed.+Eng.+Imaging+Vis.&volume=8&pages=509%E2%80%93518&doi=10.1080/21681163.2019.1672210" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1080/21681163.2019.1672210" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B64-sensors-23-07774' class='html-xx' data-content='64.'>Jaspard, F.; Nadi, M.; Rouane, A. Dielectric Properties of Blood: An Investigation of Haematocrit Dependence. <span class='html-italic'>Physiol. Meas.</span> <b>2003</b>, <span class='html-italic'>24</span>, 137–147. [<a href="https://scholar.google.com/scholar_lookup?title=Dielectric+Properties+of+Blood:+An+Investigation+of+Haematocrit+Dependence&author=Jaspard,+F.&author=Nadi,+M.&author=Rouane,+A.&publication_year=2003&journal=Physiol.+Meas.&volume=24&pages=137%E2%80%93147&doi=10.1088/0967-3334/24/1/310" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1088/0967-3334/24/1/310" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B65-sensors-23-07774' class='html-xx' data-content='65.'>Treo, E.F.; Felice, C.J.; Tirado, M.C.; Valentinuzzi, M.E.; Cervantes, D.O. Comparative Analysis of Hematocrit Measurements by Dielectric and Impedance Techniques. <span class='html-italic'>IEEE Trans. Biomed. Eng.</span> <b>2005</b>, <span class='html-italic'>52</span>, 549–552. [<a href="https://scholar.google.com/scholar_lookup?title=Comparative+Analysis+of+Hematocrit+Measurements+by+Dielectric+and+Impedance+Techniques&author=Treo,+E.F.&author=Felice,+C.J.&author=Tirado,+M.C.&author=Valentinuzzi,+M.E.&author=Cervantes,+D.O.&publication_year=2005&journal=IEEE+Trans.+Biomed.+Eng.&volume=52&pages=549%E2%80%93552&doi=10.1109/TBME.2004.843297" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TBME.2004.843297" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B66-sensors-23-07774' class='html-xx' data-content='66.'>Deabes, W.; Jamil Khayyat, K.M. Image Reconstruction in Electrical Capacitance Tomography Based on Deep Neural Networks. <span class='html-italic'>IEEE Sens. J.</span> <b>2021</b>, <span class='html-italic'>21</span>, 25818–25830. [<a href="https://scholar.google.com/scholar_lookup?title=Image+Reconstruction+in+Electrical+Capacitance+Tomography+Based+on+Deep+Neural+Networks&author=Deabes,+W.&author=Jamil+Khayyat,+K.M.&publication_year=2021&journal=IEEE+Sens.+J.&volume=21&pages=25818%E2%80%9325830&doi=10.1109/JSEN.2021.3116164" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/JSEN.2021.3116164" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B67-sensors-23-07774' class='html-xx' data-content='67.'>Ye, Y.; Wang, H. Wuqiang Yang Image Reconstruction for Electrical Capacitance Tomography Based on Sparse Representation. <span class='html-italic'>IEEE Trans. Instrum. Meas.</span> <b>2015</b>, <span class='html-italic'>64</span>, 89–102. 
[<a href="https://scholar.google.com/scholar_lookup?title=Wuqiang+Yang+Image+Reconstruction+for+Electrical+Capacitance+Tomography+Based+on+Sparse+Representation&author=Ye,+Y.&author=Wang,+H.&publication_year=2015&journal=IEEE+Trans.+Instrum.+Meas.&volume=64&pages=89%E2%80%93102&doi=10.1109/TIM.2014.2329738" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/TIM.2014.2329738" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B68-sensors-23-07774' class='html-xx' data-content='68.'>Kingma, D.P.; Ba, J.L. Adam: A Method for Stochastic Optimization. In Proceedings of the 3rd International Conference on Learning Representations, ICLR 2015—Conference Track Proceedings, Diego, CA, USA, 22 December 2015. [<a href="https://scholar.google.com/scholar_lookup?title=Adam:+A+Method+for+Stochastic+Optimization&conference=Proceedings+of+the+3rd+International+Conference+on+Learning+Representations,+ICLR+2015%E2%80%94Conference+Track+Proceedings&author=Kingma,+D.P.&author=Ba,+J.L.&publication_year=2015" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>]</li><li id='B69-sensors-23-07774' class='html-xx' data-content='69.'>Ioffe, S.; Szegedy, C. Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift. In Proceedings of the 32nd International Conference on Machine Learning, Lile, France, 6–11 July 2015; Volume 1, pp. 448–456. [<a href="https://scholar.google.com/scholar_lookup?title=Batch+Normalization:+Accelerating+Deep+Network+Training+by+Reducing+Internal+Covariate+Shift&conference=Proceedings+of+the+32nd+International+Conference+on+Machine+Learning&author=Ioffe,+S.&author=Szegedy,+C.&publication_year=2015&pages=448%E2%80%93456" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>]</li><li id='B70-sensors-23-07774' class='html-xx' data-content='70.'>Deabes, W.; Abdel-Hakim, A.E. CGAN-ECT: Tomography Image Reconstruction from Electrical Capacitance Measurements Using CGANs. <span class='html-italic'>arXiv</span> <b>2022</b>, arXiv:2209.03737. [<a href="https://scholar.google.com/scholar_lookup?title=CGAN-ECT:+Tomography+Image+Reconstruction+from+Electrical+Capacitance+Measurements+Using+CGANs&author=Deabes,+W.&author=Abdel-Hakim,+A.E.&publication_year=2022&journal=arXiv" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>]</li><li id='B71-sensors-23-07774' class='html-xx' data-content='71.'>Isola, P.; Zhu, J.Y.; Zhou, T.; Efros, A.A. Image-to-Image Translation with Conditional Adversarial Networks. In Proceedings of the 2017 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), Honolulu, HI, USA, 21–26 July 2017; pp. 5967–5976. [<a href="https://scholar.google.com/scholar_lookup?title=Image-to-Image+Translation+with+Conditional+Adversarial+Networks&conference=Proceedings+of+the+2017+IEEE+Conference+on+Computer+Vision+and+Pattern+Recognition+(CVPR)&author=Isola,+P.&author=Zhu,+J.Y.&author=Zhou,+T.&author=Efros,+A.A.&publication_year=2017&pages=5967%E2%80%935976&doi=10.1109/CVPR.2017.632" class='google-scholar' target='_blank' rel='noopener noreferrer'>Google Scholar</a>] [<a href="https://doi.org/10.1109/CVPR.2017.632" class='cross-ref' target='_blank' rel='noopener noreferrer'>CrossRef</a>]</li><li id='B72-sensors-23-07774' class='html-xx' data-content='72.'>DeVries, T.; Romero, A.; Pineda, L.; Taylor, G.W.; Drozdzal, M. On the Evaluation of Conditional GANs. 
Figures and Tables

Figure 1. Model of a transversal slice of a human thorax with lungs, heart, aorta (A), and spine (S) (a). Model of both lungs regionally affected by pneumothorax (b), pleural effusion (c), and hydropneumothorax (d). Pneumothorax and pleural effusion regions are shown, respectively, in blue and red.
Figure 2. Dataset samples: capacitance measurements (log scale), normalized capacitance measurements (linear scale), and corresponding conductivity distribution.

Figure 3. FCNN architecture: Gemm—linear layers; Relu—ReLU activation.
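The FCNN of Figure 3 is described only as a stack of linear (Gemm) layers with ReLU activations that maps a vector of capacitance measurements to a pixel-wise conductivity image. As a rough illustration only, a minimal PyTorch sketch of such a fully connected reconstructor is given below; the layer widths, the number of measurements (n_meas) and the output image size (img_size) are placeholders, not the values used in this work.

```python
# Minimal sketch of a fully connected reconstruction network (Figure 3 style):
# linear (Gemm) layers with ReLU activations mapping a measurement vector to a
# conductivity image. All layer sizes below are illustrative placeholders.
import torch
import torch.nn as nn

class FCNNReconstructor(nn.Module):
    def __init__(self, n_meas: int = 496, img_size: int = 128, hidden: int = 1024):
        super().__init__()
        self.img_size = img_size
        self.net = nn.Sequential(
            nn.Linear(n_meas, hidden), nn.ReLU(),
            nn.Linear(hidden, hidden), nn.ReLU(),
            nn.Linear(hidden, img_size * img_size),
        )

    def forward(self, c: torch.Tensor) -> torch.Tensor:
        # c: (batch, n_meas) normalized capacitance measurements
        x = self.net(c)
        return x.view(-1, 1, self.img_size, self.img_size)  # conductivity image

model = FCNNReconstructor()
fake_measurements = torch.randn(4, 496)
print(model(fake_measurements).shape)  # torch.Size([4, 1, 128, 128])
```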
Figure 4. cGAN training scheme.

Figure 5. cGAN generator architecture: K—convolutional block, DK—deconvolutional block, CV—convolutional layer, L—linear layer.
Figure 6. cGAN discriminator architecture: K—convolutional block, CV—convolutional layer, L—linear layer.

Figure 7. cGAN generator blocks: (a) convolutional (K) and (b) deconvolutional (DK).
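Figure 7 names the two building blocks of the cGAN generator: a convolutional block (K) and a deconvolutional block (DK). The sketch below shows one plausible PyTorch realization of such blocks (a strided convolution or transposed convolution followed by batch normalization and an activation); the kernel size, stride, channel counts, and choice of activation are assumptions for illustration, not the exact configuration used by the authors.

```python
# Illustrative sketch of generator building blocks in the style of Figure 7:
# K  = strided convolution + batch norm + LeakyReLU (downsampling)
# DK = transposed convolution + batch norm + ReLU   (upsampling)
# Kernel size, stride and channel counts are assumptions for illustration.
import torch.nn as nn

def conv_block_K(in_ch: int, out_ch: int) -> nn.Sequential:
    return nn.Sequential(
        nn.Conv2d(in_ch, out_ch, kernel_size=4, stride=2, padding=1),
        nn.BatchNorm2d(out_ch),
        nn.LeakyReLU(0.2, inplace=True),
    )

def deconv_block_DK(in_ch: int, out_ch: int) -> nn.Sequential:
    return nn.Sequential(
        nn.ConvTranspose2d(in_ch, out_ch, kernel_size=4, stride=2, padding=1),
        nn.BatchNorm2d(out_ch),
        nn.ReLU(inplace=True),
    )
```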
Figure 8. ANN classifier architecture.

Figure 9. Conductivity in a thorax slice: (a) healthy lungs; (b) both lungs affected by pneumothorax; (c) both lungs affected by pleural effusion; (d) lungs affected by hydropneumothorax; (e) random ellipses in the thorax. From left to right: ground truth image; image reconstructed by FCNN, cGAN, LBP, and TPINV.
Figure 10. Conductivity in a thorax slice reconstructed by FCNN for different levels of noise added to the measurements in the training and testing datasets.
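Figures 10 and 11 (and Table 4) refer to Gaussian noise added to the simulated measurements at a prescribed signal-to-noise ratio. A common recipe, shown below as an assumption rather than the authors' exact procedure, is to scale white Gaussian noise so that the power ratio between the measurement vector and the noise matches the target SNR in dB.

```python
# Add white Gaussian noise to a measurement vector at a target SNR (in dB).
# This is a generic recipe, not necessarily the exact one used in this work.
import numpy as np

def add_gaussian_noise(measurements, snr_db, rng=None):
    rng = rng or np.random.default_rng()
    signal_power = np.mean(measurements ** 2)
    noise_power = signal_power / (10.0 ** (snr_db / 10.0))
    noise = rng.normal(0.0, np.sqrt(noise_power), size=measurements.shape)
    return measurements + noise

# Example: corrupt a synthetic capacitance vector at 30 dB and 10 dB SNR.
c = np.random.rand(496)
c_30db = add_gaussian_noise(c, 30.0)
c_10db = add_gaussian_noise(c, 10.0)
```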
Figure 11. Conductivity in a thorax slice reconstructed by cGAN for different levels of noise added to the measurements in the training and testing datasets.

Figure 12. Distribution of the image quality norms for the elements of the testing dataset: (a) RMSE, (b) PSNR, (c) SSIM, and (d) 2D correlation. Image reconstruction methods: FCNN (red dashed line), cGAN (blue solid line), LBP (black dotted line), pseudoinverse with Tikhonov regularization (TPINV) (magenta dash-dotted line).
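Figure 12 and Tables 3 and 4 compare the reconstructions using RMSE, PSNR, SSIM, and the 2D correlation coefficient. These norms can be computed, for example, with NumPy and scikit-image as sketched below; the data_range value is an assumption about image scaling and is not taken from this work.

```python
# Image quality norms used in Figure 12 and Tables 3 and 4, computed with
# NumPy and scikit-image. The data_range value is an assumed image scaling.
import numpy as np
from skimage.metrics import peak_signal_noise_ratio, structural_similarity

def rmse(gt, rec):
    return float(np.sqrt(np.mean((gt - rec) ** 2)))

def corr2d(gt, rec):
    # 2D correlation coefficient between ground truth and reconstruction
    return float(np.corrcoef(gt.ravel(), rec.ravel())[0, 1])

def quality_norms(gt, rec, data_range=255.0):
    return {
        "RMSE": rmse(gt, rec),
        "PSNR": peak_signal_noise_ratio(gt, rec, data_range=data_range),
        "SSIM": structural_similarity(gt, rec, data_range=data_range),
        "CC": corr2d(gt, rec),
    }
```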
Figure 13. “One-versus-rest” integral ROC curves representing the classifier performance: (a) training and testing without noise; (b) training and testing with noise added to the measurements.
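The “one-versus-rest” ROC analysis of Figure 13 (and the DV/AUC column of Tables 3 and 4) evaluates each lung-condition class against all remaining classes. A standard scikit-learn sketch of this evaluation is given below; the class labels and the classifier's probability outputs are placeholders and do not reproduce the authors' ANN classifier.

```python
# One-versus-rest ROC curves and AUC for a multi-class classifier, in the
# spirit of Figure 13. Labels and scores below are random placeholders.
import numpy as np
from sklearn.preprocessing import label_binarize
from sklearn.metrics import roc_curve, auc

classes = [0, 1, 2, 3]   # e.g. healthy, pneumothorax, pleural effusion, hydropneumothorax
y_true = np.random.randint(0, 4, size=200)            # placeholder ground-truth labels
y_score = np.random.dirichlet(np.ones(4), size=200)   # placeholder class probabilities

y_bin = label_binarize(y_true, classes=classes)       # shape (n_samples, n_classes)
for k, name in enumerate(classes):
    fpr, tpr, _ = roc_curve(y_bin[:, k], y_score[:, k])
    print(f"class {name}: AUC = {auc(fpr, tpr):.3f}")
```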
<!-- <p><a class="html-figpopup" href="#fig_body_display_sensors-23-07774-f013"> Click here to enlarge figure </a></p> --> </div> </div> <div class="html-fig_show mfp-hide" id="fig_body_display_sensors-23-07774-f013"> <div class="html-caption"> <b>Figure 13.</b> “One-versus-rest” integral ROC curves representing the classifier performance: (<b>a</b>) training and testing without noise, (<b>b</b>) training and testing with noise introduction.</div> <div class="html-img"><img data-large="/sensors/sensors-23-07774/article_deploy/html/images/sensors-23-07774-g013.png" data-original="/sensors/sensors-23-07774/article_deploy/html/images/sensors-23-07774-g013.png" alt="Sensors 23 07774 g013" data-lsrc="/sensors/sensors-23-07774/article_deploy/html/images/sensors-23-07774-g013.png" /></div> </div> <div class="html-table-wrap" id="sensors-23-07774-t001"> <div class="html-table_wrap_td"> <div class="html-tablepopup html-tablepopup-link" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href='#table_body_display_sensors-23-07774-t001'> <img data-lsrc="https://pub.mdpi-res.com/img/table.png" /> <a class="html-expand html-tablepopup" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href="#table_body_display_sensors-23-07774-t001"></a> </div> </div> <div class="html-table_wrap_discription"> <b>Table 1.</b> Lung tissue permittivity and conductivity values at 100 MHz. </div> </div> <div class="html-table_show mfp-hide " id="table_body_display_sensors-23-07774-t001"> <div class="html-caption"><b>Table 1.</b> Lung tissue permittivity and conductivity values at 100 MHz.</div> <table > <thead ><tr ><th rowspan='2' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' > </th><th colspan='2' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Healthy Lungs</th><th rowspan='2' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Pneumothorax</th><th rowspan='2' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Pleural Effusion</th></tr><tr ><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Inspiration</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Expiration</th></tr></thead><tbody ><tr ><td align='center' valign='middle' class='html-align-center' >Relative permittivity</td><td align='center' valign='middle' class='html-align-center' >31.6</td><td align='center' valign='middle' class='html-align-center' >67.1</td><td align='center' valign='middle' class='html-align-center' >1</td><td align='center' valign='middle' class='html-align-center' >70</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Conductivity, S/m</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.306</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.559</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >10<sup>−15</sup></td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >1.4</td></tr></tbody> </table> </div> <div class="html-table-wrap" id="sensors-23-07774-t002"> <div class="html-table_wrap_td"> <div 
class="html-tablepopup html-tablepopup-link" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href='#table_body_display_sensors-23-07774-t002'> <img data-lsrc="https://pub.mdpi-res.com/img/table.png" /> <a class="html-expand html-tablepopup" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href="#table_body_display_sensors-23-07774-t002"></a> </div> </div> <div class="html-table_wrap_discription"> <b>Table 2.</b> Model component permittivity and conductivity values. </div> </div> <div class="html-table_show mfp-hide " id="table_body_display_sensors-23-07774-t002"> <div class="html-caption"><b>Table 2.</b> Model component permittivity and conductivity values.</div> <table > <thead ><tr ><th align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Component</th><th align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Permittivity</th><th align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Conductivity</th></tr></thead><tbody ><tr ><td align='center' valign='middle' class='html-align-center' >Electrodes (metal)</td><td align='center' valign='middle' class='html-align-center' >1</td><td align='center' valign='middle' class='html-align-center' >0.0643</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >Isolation (plastic)</td><td align='center' valign='middle' class='html-align-center' >2</td><td align='center' valign='middle' class='html-align-center' >10<sup>−21</sup></td></tr><tr ><td align='center' valign='middle' class='html-align-center' >Spine</td><td align='center' valign='middle' class='html-align-center' >10.53</td><td align='center' valign='middle' class='html-align-center' >0.0643</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >Heart, Aorta</td><td align='center' valign='middle' class='html-align-center' >90.8</td><td align='center' valign='middle' class='html-align-center' >0.733</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Fat</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >12.7</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.068</td></tr></tbody> </table> </div> <div class="html-table-wrap" id="sensors-23-07774-t003"> <div class="html-table_wrap_td"> <div class="html-tablepopup html-tablepopup-link" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href='#table_body_display_sensors-23-07774-t003'> <img data-lsrc="https://pub.mdpi-res.com/img/table.png" /> <a class="html-expand html-tablepopup" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href="#table_body_display_sensors-23-07774-t003"></a> </div> </div> <div class="html-table_wrap_discription"> <b>Table 3.</b> Image quality norms (RMSE—root-mean-square error; PSNR—peak signal-to-noise ratio; SSIM—structural similarity index; CC—2D correlation coefficient; DV—diagnostic value) for the testing dataset. SNR = 60 dB. The mean value of the norm, the median, and the standard deviation for the elements of the testing dataset. 
</div> </div> <div class="html-table_show mfp-hide " id="table_body_display_sensors-23-07774-t003"> <div class="html-caption"><b>Table 3.</b> Image quality norms (RMSE—root-mean-square error; PSNR—peak signal-to-noise ratio; SSIM—structural similarity index; CC—2D correlation coefficient; DV—diagnostic value) for the testing dataset. SNR = 60 dB. The mean value of the norm, the median, and the standard deviation for the elements of the testing dataset.</div> <table > <thead ><tr ><th rowspan='2' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Method</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >RMSE</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >PSNR</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >SSIM</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >CC</th><th align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >DV</th></tr><tr ><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >AUC</th></tr></thead><tbody ><tr ><td align='center' valign='middle' class='html-align-center' >FCNN</td><td align='center' valign='middle' class='html-align-center' >14.61</td><td align='center' valign='middle' class='html-align-center' >14.49</td><td align='center' valign='middle' class='html-align-center' >5.49</td><td align='center' valign='middle' class='html-align-center' >20.37</td><td align='center' valign='middle' class='html-align-center' >20.55</td><td align='center' valign='middle' class='html-align-center' >2.92</td><td align='center' valign='middle' class='html-align-center' >0.78</td><td align='center' valign='middle' class='html-align-center' >0.79</td><td align='center' valign='middle' class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.96</td><td align='center' valign='middle' class='html-align-center' >0.97</td><td align='center' valign='middle' 
class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.99</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >cGAN</td><td align='center' valign='middle' class='html-align-center' >8.86</td><td align='center' valign='middle' class='html-align-center' >8.17</td><td align='center' valign='middle' class='html-align-center' >4.13</td><td align='center' valign='middle' class='html-align-center' >27.06</td><td align='center' valign='middle' class='html-align-center' >27.39</td><td align='center' valign='middle' class='html-align-center' >3.07</td><td align='center' valign='middle' class='html-align-center' >0.87</td><td align='center' valign='middle' class='html-align-center' >0.87</td><td align='center' valign='middle' class='html-align-center' >0.03</td><td align='center' valign='middle' class='html-align-center' >0.98</td><td align='center' valign='middle' class='html-align-center' >0.99</td><td align='center' valign='middle' class='html-align-center' >0.03</td><td align='center' valign='middle' class='html-align-center' >0.99</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >LBP</td><td align='center' valign='middle' class='html-align-center' >54.37</td><td align='center' valign='middle' class='html-align-center' >53.94</td><td align='center' valign='middle' class='html-align-center' >14.4</td><td align='center' valign='middle' class='html-align-center' >12.45</td><td align='center' valign='middle' class='html-align-center' >12.52</td><td align='center' valign='middle' class='html-align-center' >1.23</td><td align='center' valign='middle' class='html-align-center' >0.11</td><td align='center' valign='middle' class='html-align-center' >0.12</td><td align='center' valign='middle' class='html-align-center' >0.02</td><td align='center' valign='middle' class='html-align-center' >0.68</td><td align='center' valign='middle' class='html-align-center' >0.68</td><td align='center' valign='middle' class='html-align-center' >0.06</td><td align='center' valign='middle' class='html-align-center' >0.82</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >TPINV</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >57.22</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >57.48</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >18.16</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >12.93</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >12.73</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >1.97</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.19</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.19</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.02</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.62</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.63</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.1</td><td align='center' valign='middle' style='border-bottom:solid thin' 
class='html-align-center' >0.77</td></tr></tbody> </table> </div> <div class="html-table-wrap" id="sensors-23-07774-t004"> <div class="html-table_wrap_td"> <div class="html-tablepopup html-tablepopup-link" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href='#table_body_display_sensors-23-07774-t004'> <img data-lsrc="https://pub.mdpi-res.com/img/table.png" /> <a class="html-expand html-tablepopup" data-counterslinkmanual = "https://www.mdpi.com/1424-8220/23/18/7774/display" href="#table_body_display_sensors-23-07774-t004"></a> </div> </div> <div class="html-table_wrap_discription"> <b>Table 4.</b> Image quality norms (RMSE—root-mean-square error; PSNR—peak signal-to-noise ratio; SSIM—structural similarity index; CC—2D correlation coefficient; DV—diagnostic value) for the testing dataset with Gaussian noise added to measurements during training and testing. The mean value of the norm, the median, and the standard deviation for the elements of the testing dataset. </div> </div> <div class="html-table_show mfp-hide " id="table_body_display_sensors-23-07774-t004"> <div class="html-caption"><b>Table 4.</b> Image quality norms (RMSE—root-mean-square error; PSNR—peak signal-to-noise ratio; SSIM—structural similarity index; CC—2D correlation coefficient; DV—diagnostic value) for the testing dataset with Gaussian noise added to measurements during training and testing. The mean value of the norm, the median, and the standard deviation for the elements of the testing dataset.</div> <table > <thead ><tr ><th rowspan='2' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >Method</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >RMSE</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >PSNR</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >SSIM</th><th colspan='3' align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >CC</th><th align='center' valign='middle' style='border-top:solid thin;border-bottom:solid thin' class='html-align-center' >DV</th></tr><tr ><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >µ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >M</th><th align='center' valign='middle' style='border-bottom:solid thin' 
class='html-align-center' >σ</th><th align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >AUC</th></tr></thead><tbody ><tr ><td colspan='14' align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Training—no noise, Testing—30 dB SNR</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >FCNN</td><td align='center' valign='middle' class='html-align-center' >14.84</td><td align='center' valign='middle' class='html-align-center' >14.76</td><td align='center' valign='middle' class='html-align-center' >5.53</td><td align='center' valign='middle' class='html-align-center' >20.44</td><td align='center' valign='middle' class='html-align-center' >20.66</td><td align='center' valign='middle' class='html-align-center' >3.02</td><td align='center' valign='middle' class='html-align-center' >0.78</td><td align='center' valign='middle' class='html-align-center' >0.79</td><td align='center' valign='middle' class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.96</td><td align='center' valign='middle' class='html-align-center' >0.97</td><td align='center' valign='middle' class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.99</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >cGAN</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >9.45</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >8.96</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >4.37</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >26.9</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >27.17</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >2.96</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.87</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.87</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.03</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.98</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.99</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.03</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.99</td></tr><tr ><td colspan='14' align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Training—no noise, Testing—10 dB SNR</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >FCNN</td><td align='center' valign='middle' class='html-align-center' >62.51</td><td align='center' valign='middle' class='html-align-center' >61.18</td><td align='center' valign='middle' class='html-align-center' >14.17</td><td align='center' valign='middle' class='html-align-center' >11.2</td><td align='center' valign='middle' class='html-align-center' >11.22</td><td align='center' valign='middle' class='html-align-center' >1.83</td><td align='center' valign='middle' class='html-align-center' >0.48</td><td align='center' 
valign='middle' class='html-align-center' >0.48</td><td align='center' valign='middle' class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.68</td><td align='center' valign='middle' class='html-align-center' >0.7</td><td align='center' valign='middle' class='html-align-center' >0.13</td><td align='center' valign='middle' class='html-align-center' >0.79</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >cGAN</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >18.00</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >17.36</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >7.37</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >21.67</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >22.01</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >3.67</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.81</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.82</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.04</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.95</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.96</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.06</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.97</td></tr><tr ><td colspan='14' align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Training—30 dB SNR, Testing—30 dB SNR</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >FCNN</td><td align='center' valign='middle' class='html-align-center' >15.01</td><td align='center' valign='middle' class='html-align-center' >14.92</td><td align='center' valign='middle' class='html-align-center' >5.52</td><td align='center' valign='middle' class='html-align-center' >20.28</td><td align='center' valign='middle' class='html-align-center' >20.5</td><td align='center' valign='middle' class='html-align-center' >2.97</td><td align='center' valign='middle' class='html-align-center' >0.77</td><td align='center' valign='middle' class='html-align-center' >0.79</td><td align='center' valign='middle' class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.96</td><td align='center' valign='middle' class='html-align-center' >0.97</td><td align='center' valign='middle' class='html-align-center' >0.05</td><td align='center' valign='middle' class='html-align-center' >0.99</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >cGAN</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >9.58</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >9.07</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >4.43</td><td align='center' valign='middle' style='border-bottom:solid thin' 
class='html-align-center' >26.8</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >27.06</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >2.98</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.87</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.87</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.03</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.98</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.99</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.03</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.99</td></tr><tr ><td colspan='14' align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >Training—30 dB SNR, Testing—10 dB SNR</td></tr><tr ><td align='center' valign='middle' class='html-align-center' >FCNN</td><td align='center' valign='middle' class='html-align-center' >27.55</td><td align='center' valign='middle' class='html-align-center' >27.13</td><td align='center' valign='middle' class='html-align-center' >6.61</td><td align='center' valign='middle' class='html-align-center' >14.96</td><td align='center' valign='middle' class='html-align-center' >15.12</td><td align='center' valign='middle' class='html-align-center' >2.38</td><td align='center' valign='middle' class='html-align-center' >0.64</td><td align='center' valign='middle' class='html-align-center' >0.65</td><td align='center' valign='middle' class='html-align-center' >0.06</td><td align='center' valign='middle' class='html-align-center' >0.89</td><td align='center' valign='middle' class='html-align-center' >0.91</td><td align='center' valign='middle' class='html-align-center' >0.08</td><td align='center' valign='middle' class='html-align-center' >0.94</td></tr><tr ><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >cGAN</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >17.54</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >16.86</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >7.29</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >22.03</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >22.41</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >3.74</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.82</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.82</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.04</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.95</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.97</td><td align='center' valign='middle' style='border-bottom:solid thin' class='html-align-center' >0.06</td><td align='center' valign='middle' 
style='border-bottom:solid thin' class='html-align-center' >0.97</td></tr></tbody> </table> </div> </section><section class='html-fn_group'><table><tr id=''><td></td><td><div class='html-p'><b>Disclaimer/Publisher’s Note:</b> The statements, opinions and data contained in all publications are solely those of the individual author(s) and contributor(s) and not of MDPI and/or the editor(s). MDPI and/or the editor(s) disclaim responsibility for any injury to people or property resulting from any ideas, methods, instructions or products referred to in the content.</div></td></tr></table></section> <section id="html-copyright"><br>© 2023 by the authors. Licensee MDPI, Basel, Switzerland. This article is an open access article distributed under the terms and conditions of the Creative Commons Attribution (CC BY) license (<a href='https://creativecommons.org/licenses/by/4.0/' target='_blank' rel="noopener noreferrer" >https://creativecommons.org/licenses/by/4.0/</a>).</section> </div> </div>
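<div class='html-p'>The results table above compares FCNN and cGAN reconstructions when the networks are trained on noise-free or 30 dB SNR data and then tested on measurements corrupted at 30 dB or 10 dB SNR. As an illustration only (this code is not from the paper), the sketch below shows one standard way such noisy test data can be produced: additive white Gaussian noise scaled to a target SNR in decibels. The measurement-vector length (96) and the helper name add_awgn are hypothetical.</div>
<pre><code>
# Hedged sketch: corrupting a simulated EIT measurement frame with additive
# white Gaussian noise at a target SNR (dB), mimicking the "Testing - 30 dB SNR"
# and "Testing - 10 dB SNR" conditions in the table. Not the authors' code.
import numpy as np

def add_awgn(measurements: np.ndarray, snr_db: float, rng=None) -> np.ndarray:
    """Return a noisy copy of `measurements` at the requested SNR in dB."""
    rng = np.random.default_rng() if rng is None else rng
    signal_power = np.mean(measurements ** 2)          # mean signal power
    noise_power = signal_power / (10.0 ** (snr_db / 10.0))
    noise = rng.normal(0.0, np.sqrt(noise_power), size=measurements.shape)
    return measurements + noise

# Example usage on a placeholder measurement vector (hypothetical length).
frame = np.random.rand(96)
noisy_30db = add_awgn(frame, 30.0)   # mild noise, as in the 30 dB test case
noisy_10db = add_awgn(frame, 10.0)   # heavy noise, as in the 10 dB test case
</code></pre>
<div class='html-p'>Under this model, a 30 dB SNR corresponds to a noise RMS of roughly 3% of the signal RMS, whereas 10 dB corresponds to roughly 32%, which is consistent with the much larger metric degradation seen in the 10 dB rows of the table.</div>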
<div class="additional-content"> <h2><a name="cite"></a>Share and Cite</h2> <div class="in-tab" style="padding-top: 0px!important; margin-top: 15px;"> <div><b>MDPI and ACS Style</b></div> <p> Ivanenko, M.; Smolik, W.T.; Wanta, D.; Midura, M.; Wróblewski, P.; Hou, X.; Yan, X. Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax. <em>Sensors</em> <b>2023</b>, <em>23</em>, 7774. https://doi.org/10.3390/s23187774 </p> <div style="display: block"> <b>AMA Style</b><br> <p> Ivanenko M, Smolik WT, Wanta D, Midura M, Wróblewski P, Hou X, Yan X. Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax. <em>Sensors</em>. 2023; 23(18):7774. https://doi.org/10.3390/s23187774 </p> <b>Chicago/Turabian Style</b><br> <p> Ivanenko, Mikhail, Waldemar T. Smolik, Damian Wanta, Mateusz Midura, Przemysław Wróblewski, Xiaohan Hou, and Xiaoheng Yan. 2023. "Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax" <em>Sensors</em> 23, no. 18: 7774. https://doi.org/10.3390/s23187774 </p> <b>APA Style</b><br> <p> Ivanenko, M., Smolik, W. T., Wanta, D., Midura, M., Wróblewski, P., Hou, X., & Yan, X. (2023). Image Reconstruction Using Supervised Learning in Wearable Electrical Impedance Tomography of the Thorax. <em>Sensors</em>, <em>23</em>(18), 7774. https://doi.org/10.3390/s23187774 </p> </div> </div> </div>
</article> </div> </div></div>
height="12" /> <rect x="0" y="264" width="12" height="12" /> <rect x="24" y="264" width="12" height="12" /> <rect x="36" y="264" width="12" height="12" /> <rect x="48" y="264" width="12" height="12" /> <rect x="72" y="264" width="12" height="12" /> <rect x="96" y="264" width="12" height="12" /> <rect x="108" y="264" width="12" height="12" /> <rect x="168" y="264" width="12" height="12" /> <rect x="180" y="264" width="12" height="12" /> <rect x="240" y="264" width="12" height="12" /> <rect x="288" y="264" width="12" height="12" /> <rect x="0" y="276" width="12" height="12" /> <rect x="72" y="276" width="12" height="12" /> <rect x="96" y="276" width="12" height="12" /> <rect x="108" y="276" width="12" height="12" /> <rect x="144" y="276" width="12" height="12" /> <rect x="156" y="276" width="12" height="12" /> <rect x="168" y="276" width="12" height="12" /> <rect x="180" y="276" width="12" height="12" /> <rect x="192" y="276" width="12" height="12" /> <rect x="216" y="276" width="12" height="12" /> <rect x="240" y="276" width="12" height="12" /> <rect x="252" y="276" width="12" height="12" /> <rect x="276" y="276" width="12" height="12" /> <rect x="0" y="288" width="12" height="12" /> <rect x="12" y="288" width="12" height="12" /> <rect x="24" y="288" width="12" height="12" /> <rect x="36" y="288" width="12" height="12" /> <rect x="48" y="288" width="12" height="12" /> <rect x="60" y="288" width="12" height="12" /> <rect x="72" y="288" width="12" height="12" /> <rect x="96" y="288" width="12" height="12" /> <rect x="168" y="288" width="12" height="12" /> <rect x="180" y="288" width="12" height="12" /> <rect x="192" y="288" width="12" height="12" /> <rect x="228" y="288" width="12" height="12" /> <rect x="276" y="288" width="12" height="12" /> <rect x="288" y="288" width="12" height="12" /> </g> </svg> </div> </div> </div> <a class="close-reveal-modal" aria-label="Close"> <i class="material-icons">clear</i> </a> </div> <a href="#" class="back-to-top"><span class="show-for-medium-up">Back to Top</span><span class="show-for-small">Top</span></a> <script data-cfasync="false" src="/cdn-cgi/scripts/5c5dd728/cloudflare-static/email-decode.min.js"></script><script src="https://pub.mdpi-res.com/assets/js/modernizr-2.8.3.min.js?5227e0738f7f421d?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/jquery-1.12.4.min.js?4f252523d4af0b47?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/foundation-5.5.3.min.js?6b2ec41c18b29054?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/foundation-5.5.3.equalizer.min.js?0f6c549b75ec554c?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/jquery.multiselect.js?0edd3998731d1091?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/jquery.cycle2.min.js?63413052928f97ee?1732286508"></script> <script> // old browser fix - this way the console log rows won't throw (silent) errors in browsers not supporting console log if (!window.console) window.console = {}; if (!window.console.log) window.console.log = function () { }; var currentJournalNameSystem = "sensors"; $(document).ready(function() { $('select.foundation-select').multiselect({ search: true, minHeight: 130, maxHeight: 130, }); $(document).foundation({ orbit: { timer_speed: 4000, }, reveal: { animation: 'fadeAndPop', animation_speed: 100, } }); $(".chosen-select").each(function(element) { var maxSelected = (undefined !== $(this).data('maxselectedoptions') ? 
$(this).data('maxselectedoptions') : 100); $(this).on('chosen:ready', function(event, data) { var select = $(data.chosen.form_field); if (select.attr('id') === 'journal-browser-volume') { $(data.chosen.dropdown).addClass('UI_JournalBrowser_Volume_Options'); } if (select.attr('id') === 'journal-browser-issue') { $(data.chosen.dropdown).addClass('UI_JournalBrowser_Issue_Options'); } }).chosen({ display_disabled_options: false, disable_search_threshold: 7, max_selected_options: maxSelected, width: "100%" }); }); $(".toEncode").each(function(e) { var oldHref = $(this).attr("href"); var newHref = oldHref.replace('.botdefense.please.enable.javascript.','@'); $(this).attr("href", newHref); if (!$(this).hasClass("emailCaptcha")) { $(this).html(newHref.replace('mailto:', '')); } $(this).removeClass("visibility-hidden"); }); $(document).on('opened.fndtn.reveal', '[data-reveal]', function() { $(document).foundation('equalizer', 'reflow'); }); // fix the images that have tag height / width defined // otherwise the default foundation styles overwrite the tag definitions $("img").each(function() { if ($(this).attr('width') != undefined || $(this).attr('height') != undefined) { $(this).addClass("img-fixed"); } }); $("#basic_search, #advanced_search").submit(function(e) { var searchArguments = false; $(this).find("input,select").not("#search,.search-button").each(function() { if (undefined === $(this).val() || "" === $(this).val()) { $(this).attr('name', null); } else { $(this).attr('name'); searchArguments = true; } }); if (!searchArguments) { window.location = $(this).attr('action'); return false; } }); $(".hide-show-desktop-option").click(function(e) { e.preventDefault(); var parentDiv = $(this).closest("div"); $.ajax({ url: $(this).attr('href'), success: function(msg) { parentDiv.removeClass().hide(); } }); }); $(".generic-toggleable-header").click(function(e) { $(this).toggleClass("active"); $(this).next(".generic-toggleable-content").toggleClass("active"); }); /* * handle whole row as a link if the row contains only one visible link */ $("table.new tr").hover(function() { if ($(this).find("td:visible a").length == 1) { $(this).addClass("single-link"); } }, function() { $(this).removeClass("single-link"); }); $("table.new:not(.table-of-tables)").on("click", "tr.single-link", function(e) { var target = $(e.target); if (!e.ctrlKey && !target.is("a")) { $(this).find("td:visible a")[0].click(); } }); $(document).on("click", ".custom-accordion-for-small-screen-link", function(e) { if ($(this).closest("#basic_search").length > 0) { if ($(".search-container__advanced").first().is(":visible")) { openAdvanced() } } if (Foundation.utils.is_small_only()) { if ($(this).hasClass("active")) { $(this).removeClass("active"); $(this).next(".custom-accordion-for-small-screen-content").addClass("show-for-medium-up"); } else { $(this).addClass("active"); $(this).next(".custom-accordion-for-small-screen-content").removeClass("show-for-medium-up"); $(document).foundation('orbit', 'reflow'); } } if (undefined !== $(this).data("callback")) { var customCallback = $(this).data("callback"); func = window[customCallback]; func(); } }); $(document).on("click", ".js-open-small-search", function(e) { e.preventDefault(); $(this).toggleClass("active").closest(".tab-bar").toggleClass("active"); $(".search-container").toggleClass("hide-for-small-down"); }); $(document).on("click", ".js-open-menu", function(e) { $(".search-container").addClass("hide-for-small-down"); }); $(window).on('resize', function() { 
recalculate_main_browser_position(); recalculate_responsive_moving_containers(); }); updateSearchLabelVisibilities(); recalculate_main_browser_position(); recalculate_responsive_moving_containers(); if (window.document.documentMode == 11) { $("<link/>", { rel: "stylesheet", type: "text/css", href: "https://fonts.googleapis.com/icon?family=Material+Icons"}).appendTo("head"); } }); function recalculate_main_browser_position() { if (Foundation.utils.is_small_only()) { if ($("#js-main-top-container").parent("#js-large-main-top-container").length > 0) { $("#js-main-top-container").appendTo($("#js-small-main-top-container")); } } else { if ($("#js-main-top-container").parent("#js-small-main-top-container").length > 0) { $("#js-main-top-container").appendTo($("#js-large-main-top-container")); } } } function recalculate_responsive_moving_containers() { $(".responsive-moving-container.large").each(function() { var previousParent = $(".responsive-moving-container.active[data-id='"+$(this).data("id")+"']"); var movingContent = previousParent.html(); if (Foundation.utils.is_small_only()) { var currentParent = $(".responsive-moving-container.small[data-id='"+$(this).data("id")+"']"); } else if (Foundation.utils.is_medium_only()) { var currentParent = $(".responsive-moving-container.medium[data-id='"+$(this).data("id")+"']"); } else { var currentParent = $(".responsive-moving-container.large[data-id='"+$(this).data("id")+"']"); } if (previousParent.attr("class") !== currentParent.attr("class")) { currentParent.html(movingContent); previousParent.html(); currentParent.addClass("active"); previousParent.removeClass("active"); } }); } // cookies allowed is checked from a) local storage and b) from server separately so that the footer bar doesn't // get included in the custom page caches function checkCookiesAllowed() { var cookiesEnabled = localStorage.getItem("mdpi_cookies_enabled"); if (null === cookiesEnabled) { $.ajax({ url: "/ajax_cookie_value/mdpi_cookies_accepted", success: function(data) { if (data.value) { localStorage.setItem("mdpi_cookies_enabled", true); checkDisplaySurvey(); } else { $(".js-allow-cookies").show(); } } }); } else { checkDisplaySurvey(); } } function checkDisplaySurvey() { } window.addEventListener('CookiebotOnAccept', function (e) { var CookieDate = new Date; if (Cookiebot.consent.preferences) { CookieDate.setFullYear(CookieDate.getFullYear() + 1); document.cookie = "mdpi_layout_type_v2=mobile; path=/; expires=" + CookieDate.toUTCString() + ";"; $(".js-toggle-desktop-layout-link").css("display", "inline-block"); } }, false); window.addEventListener('CookiebotOnDecline', function (e) { if (!Cookiebot.consent.preferences) { $(".js-toggle-desktop-layout-link").hide(); if ("" === "desktop") { window.location = "/toggle_desktop_layout_cookie"; } } }, false); var hash = $(location).attr('hash'); if ("#share" === hash) { if (1 === $("#main-share-modal").length) { $('#main-share-modal').foundation('reveal', 'open'); } } </script> <script src="https://pub.mdpi-res.com/assets/js/lib.js?f8d3d71b3a772f9d?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/mdpi.js?c267ce58392b15da?1732286508"></script> <script>var banners_url = 'https://serve.mdpi.com';</script> <script type='text/javascript' src='https://pub.mdpi-res.com/assets/js/ifvisible.min.js?c621d19ecb761212?1732286508'></script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/affix.js?ac4ea55275297c15?1732286508"></script> <script 
src="https://pub.mdpi-res.com/assets/js/clipboard.min.js?3f3688138a1b9fc4?1732286508"></script> <script type="text/javascript"> $(document).ready(function() { var helpFunctions = $(".middle-column__help__fixed"); var leftColumnAffix = $(".left-column__fixed"); var middleColumn = $("#middle-column"); var clone = null; helpFunctions.affix({ offset: { top: function() { return middleColumn.offset().top - 8 - (Foundation.utils.is_medium_only() ? 30 : 0); }, bottom: function() { return $("#footer").innerHeight() + 74 + (Foundation.utils.is_medium_only() ? 0 : 0); } } }); if (leftColumnAffix.length > 0) { clone = leftColumnAffix.clone(); clone.addClass("left-column__fixed__affix"); clone.insertBefore(leftColumnAffix); clone.css('width', leftColumnAffix.outerWidth() + 50); clone.affix({ offset: { top: function() { return leftColumnAffix.offset().top - 30 - (Foundation.utils.is_medium_only() ? 50 : 0); }, bottom: function() { return $("#footer").innerHeight() + 92 + (Foundation.utils.is_medium_only() ? 0 : 0); } } }); } $(window).on("resize", function() { if (clone !== null) { clone.css('width', leftColumnAffix.outerWidth() + 50); } }); new ClipboardJS('.js-clipboard-copy'); }); </script> <script src="https://pub.mdpi-res.com/assets/js/jquery-ui-1.13.2.min.js?1e2047978946a1d2?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/slick.min.js?d5a61c749e44e471?1732286508"></script> <script> $(document).ready(function() { $(".link-article-menu").click(function(e) { e.preventDefault(); $(this).find('span').toggle(); $(this).next("div").toggleClass("active"); }); $(".js-similarity-related-articles").click(function(e) { e.preventDefault(); if ('' !== $('#recommended-articles-modal').attr('data-url')) { $('#recommended-articles-modal').foundation('reveal', 'open', $('#recommended-articles-modal').attr('data-url')); } }); $.ajax({ url: "/article/1236406/similarity-related/show-link", success: function(result) { if (result.show) { $('#recommended-articles-modal').attr('data-url', result.link); $('.js-article-similarity-container').show(); } } }); $(document).on('opened.fndtn.reveal', '[data-reveal]', function() { var modal = $(this); if (modal.attr('id') === "author-biographies-modal") { modal.find('.multiple-items').slick({ slidesToShow: 1, nextArrow: '<a class="slick-next" href="#"><i class="material-icons">chevron_right</i></a>', prevArrow: '<a class="slick-prev" href="#"><i class="material-icons">chevron_left</i></a>', slidesToScroll: 1, draggable: false, }); modal.find('.multiple-items').slick('refresh'); } }); }); </script> <script> $(document).ready(function() { $(document).on('keyup', function (e) { if (e.keyCode == 27) { var hElem = $(this).find(".annotator-adder"); if (hElem.length){ hElem.css({'visibility':'hidden'}); } else { document.querySelector("hypothesis-adder").shadowRoot.querySelector(".annotator-adder").style.visibility = "hidden"; } } }); }); </script> <script> window.hypothesisConfig = function () { return { sidebarAppUrl: 'https://commenting.mdpi.com/app.html', showHighlights: 'whenSidebarOpen' , openSidebar: false , assetRoot: 'https://commentingres.mdpi.com/hypothesis', services: [{ apiUrl: 'https://commenting.mdpi.com/api/', authority: 'mdpi', grantToken: '', doi: '10.3390/s23187774' }], }; }; </script> <script async id="hypothesis_frame"></script> <script type="text/javascript"> if (-1 !== window.location.href.indexOf("?src=")) { window.history.replaceState({}, '', `${location.pathname}`); } $(document).ready(function() { var scifeedCounter = 0; var search = 
window.location.search; var mathjaxReady = false; // late image file loading $("img[data-lsrc]").each(function() { $(this).attr("src", $(this).data("lsrc")); }); // late mathjax initialization var head = document.getElementsByTagName("head")[0]; var script = document.createElement("script"); script.type = "text/x-mathjax-config"; script[(window.opera ? "innerHTML" : "text")] = "MathJax.Hub.processSectionDelay = 0;\n" + "MathJax.Hub.Config({\n" + " \"menuSettings\": {\n" + " CHTMLpreview: false\n" + " },\n" + " \"CHTML-preview\":{\n" + " disabled: true\n" + " },\n" + " \"HTML-CSS\": {\n" + " scale: 90,\n" + " availableFonts: [],\n" + " preferredFont: null,\n" + " preferredFonts: null,\n" + " webFont:\"Gyre-Pagella\",\n" + " imageFont:'TeX',\n" + " undefinedFamily:\"'Arial Unicode MS',serif\",\n" + " linebreaks: { automatic: false }\n" + " },\n" + " \"TeX\": {\n" + " extensions: ['noErrors.js'],\n" + " noErrors: {\n" + " inlineDelimiters: [\"\",\"\"],\n" + " multiLine: true,\n" + " style: {\n" + " 'font-size': '90%',\n" + " 'text-align': 'left',\n" + " 'color': 'black',\n" + " 'padding': '1px 3px',\n" + " 'border': '1px solid'\n" + " }\n" + " }\n" + " }\n" + "});\n" + "MathJax.Hub.Register.StartupHook('End', function() {\n" + " refreshMathjaxWidths();\n" + " mathjaxReady = true;\n" + "});\n" + "MathJax.Hub.Startup.signal.Interest(function (message) {\n" + " if (message == 'End') {\n" + " var hypoLink = document.getElementById('hypothesis_frame');\n" + " if (null !== hypoLink) {\n" + " hypoLink.setAttribute('src', 'https://commenting.mdpi.com/embed.js');\n" + " }\n" + " }\n" + "});"; head.appendChild(script); script = document.createElement("script"); script.type = "text/javascript"; script.src = "https://pub.mdpi-res.com/bundles/mathjax/MathJax.js?config=TeX-AMS-MML_HTMLorMML"; head.appendChild(script); // article version checker if (0 === search.indexOf('?type=check_update&version=')) { $.ajax({ url: "/1424-8220/23/18/7774" + "/versioncheck" + search, success: function(result) { $(".js-check-update-container").html(result); } }); } $('#feed_option').click(function() { // tracker if ($('#scifeed_clicked').length<1) { $(this).append('<span style="display:none" id="scifeed_clicked">done</span>'); } $('#feed_data').toggle('slide', { direction: 'up'}, '1000'); // slideToggle(700); OR toggle(700) $("#scifeed_error_msg").html('').hide(); $("#scifeed_notice_msg").html('').hide(); }); $('#feed_option').click(function(event) { setTimeout(function(){ var captchaSection = $("#captchaSection"); captchaSection.removeClass('ui-helper-hidden').find('input').prop('disabled', false); // var img = captchaSection.find('img'); // img.attr('src', img.data('url') + "?" 
+ (new Date()).getTime()); // $(".captcha_reload").trigger("click"); var img = document.getElementById('gregwar_captcha_scifeed'); img.src = '/generate-captcha/gcb_captcha?n=' + (new Date()).getTime(); },800); }); $(document).on('click', '.split_feeds', function() { var name = $( this ).attr('name'); var flag = 1 - ($(this).is(":checked")*1); $('.split_feeds').each(function (index) { if ($( this ).attr('name') !== name) { $(this)[0].checked = flag; } }); }); $(document).on('click', '#scifeed_submit, #scifeed_submit1', function(event) { event.preventDefault(); $(".captcha_reload").trigger("click"); $("#scifeed_error_msg").html(""); $("#scifeed_error_msg").hide(); }); $(document).on('click', '.subscription_toggle', function(event) { if ($(this).val() === 'Create SciFeed' && $('#scifeed_hidden_flag').length>0) { event.preventDefault(); // alert('Here there would be a captcha because user is not logged in'); var captchaSection = $("#captchaSection"); if (captchaSection.hasClass('ui-helper-hidden')) { captchaSection.removeClass('ui-helper-hidden').find('input').prop('disabled', false); var img = captchaSection.find('img'); img.attr('src', img.data('url') + "?" + (new Date()).getTime()); $("#reloadCaptcha").trigger("click"); } } }); $(document).on('click', '.scifeed_msg', function(){ $(this).hide(); }); $(document).on('click', '.article-scilit-search', function(e) { e.preventDefault(); var data = $(".article-scilit-search-data").val(); var dataArray = data.split(';').map(function(keyword) { return "(\"" + keyword.trim() + "\")"; }); var searchQuery = dataArray.join(" OR "); var searchUrl = encodeURI("https://www.scilit.net/articles/search?q="+ searchQuery + "&advanced=1&highlight=1"); var win = window.open(searchUrl, '_blank'); if (win) { win.focus(); } else { window.location(searchUrl); } }); display_stats(); citedCount(); follow_goto(); // Select the node that will be observed for mutations const targetNodes = document.getElementsByClassName('hypothesis-count-container'); // Options for the observer (which mutations to observe) const config = { attributes: false, childList: true, subtree: false }; // Callback function to execute when mutations are observed const callback = function(mutationList, observer) { for(const mutation of mutationList) { if (mutation.type === 'childList') { let node = $(mutation.target); if (parseInt(node.html()) > 0) { node.show(); } } } }; // Create an observer instance linked to the callback function const observer = new MutationObserver(callback); // Start observing the target node for configured mutations for(const targetNode of targetNodes) { observer.observe(targetNode, config); } // Select the node that will be observed for mutations const mathjaxTargetNode = document.getElementById('middle-column'); // Callback function to execute when mutations are observed const mathjaxCallback = function(mutationList, observer) { if (mathjaxReady && typeof(MathJax) !== 'undefined') { refreshMathjaxWidths(); } }; // Create an observer instance linked to the callback function const mathjaxObserver = new ResizeObserver(mathjaxCallback); // Start observing the target node for configured mutations mathjaxObserver.observe(mathjaxTargetNode); }); /* END $(document).ready */ function refreshMathjaxWidths() { let width = ($('.html-body').width()*0.9) + "px"; $('.MathJax_Display').css('max-width', width); $('.MJXc-display').css('max-width', width); } function sendScifeedFrom(form) { if (!$('#scifeed_email').val().trim()) { // empty email alert('Please, provide an email for subscribe 
to this scifeed'); return false; } else if (!$('#captchaSection').hasClass('ui-helper-hidden') && !$('#captchaSection').find('input').val().trim()) { // empty captcha alert('Please, fill the captcha field.'); return false; } else if( ((($('#scifeed_form').find('input:checkbox:checked').length)-($('#split_feeds:checked').length))<1) || ($('#scifeed_kwd_txt').length < 0 && !$('#scifeed_kwd_txt').val().trim()) || ($('#scifeed_author_txt').length<0 &&!$('#scifeed_author_txt').val().trim()) ) { alert('You did not select anything to subscribe'); return false; } else if(($('#scifeed_form').find('input:checkbox:checked').length)-($('#split_feeds2:checked').length)<1){ alert("You did not select anything to subscribe"); return false; } else { var url = $('#scifeed_subscribe_url').html(); var formData = $(form).serializeArray(); $.post(url, formData).done(function (data) { if (JSON.parse(data)) { $('.scifeed_msg').hide(); var res = JSON.parse(data); var successFeeds = 0; var errorFeeds = 0; if (res) { $('.scifeed_msg').html(''); $.each(res, function (index, val) { if (val) { if (val.error) { errorFeeds++; $("#scifeed_error_msg").append(index+' - '+val.error+'<br>'); } if (val.notice) // for successful feed creation { successFeeds++; // $("#scifeed_notice_msg").append(index+' - '+val.notice+'<br>'); $("#scifeed_notice_msg").append('<li>'+index+'</li>'); } } }); if (successFeeds>0) { text = $('#scifeed_notice_msg').html(); text = 'The following feed'+(successFeeds>1?'s have':' has')+ ' been sucessfully created:<br><ul>'+ text + '</ul>' +($('#scifeed_hidden_flag').length>0 ? 'You are not logged in, so you probably need to validate '+ (successFeeds>1?'them':' it')+'.<br>' :'' ) +'Please check your email'+(successFeeds>1?'s':'')+' for more details.'; //(successFeeds>1?' for each of them':'')+'.<br>'; $("#scifeed_notice_msg").html(text); $("#scifeed_notice_msg").show(); } if (errorFeeds>0) { $("#scifeed_error_msg").show();; } } $("#feed_data").hide(); } }); } } function follow_goto() { var hashStr = location.hash.replace("#",""); if(typeof hashStr !== 'undefined') { if( hashStr == 'supplementary') { document.getElementById('suppl_id').scrollIntoView(); } if( hashStr == 'citedby') { document.getElementById('cited_id').scrollIntoView(); } } } function cited() { $("#framed_div").toggle('fast', function(){ if ($(this).css('display') != 'none') { var loaded = document.getElementById("loaded"); if(loaded.innerHTML == "No") { // Load Xref result var container = document.getElementById("framed_div"); // This replace the content container.innerHTML = "<img src=\"https://pub.mdpi-res.com/img/loading_circle.gif?9a82694213036313?1732286508\" height=\"20\" width=\"20\" alt=\"Processing...\" style=\"vertical-align:middle; margin-right:0.6em;\">"; var url = "/citedby/10.3390%252Fs23187774/3"; $.post(url, function(result) { if (result.success) { container.innerHTML = result.view; } loaded.innerHTML = "Yes"; }); } } return true; // for not going at the beginning of the page... }) return true; // for not going at the beginning of the page... 
} function detect_device() { // Added by Bastien (18/08/2014): based on the http://detectmobilebrowsers.com/ detector var check = false; (function(a){if(/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows (ce|phone)|xda|xiino/i.test(a)||/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(a.substr(0,4)))check = true})(navigator.userAgent||navigator.vendor||window.opera); return check; } function display_stats(){ $("#article_stats_div").toggle(); return false; } /* * Cited By Scopus */ function citedCount(){ $("#framed_div_cited_count").toggle('fast', function(){ if ($(this).css('display') != 'none') { var loaded = document.getElementById("loaded_cite_count"); // to load only once the result! if(loaded.innerHTML == "No") { // Load Xref result var d = document.getElementById("framed_div_cited_count"); // This replace the content d.innerHTML = "<img src=\"https://pub.mdpi-res.com/img/loading_circle.gif?9a82694213036313?1732286508\" height=\"20\" width=\"20\" alt=\"Processing...\" style=\"vertical-align:middle; margin-right:0.6em;\">"; $.ajax({ method : "POST", url : "/cite-count/10.3390%252Fs23187774", success : function(data) { if (data.succ) { d.innerHTML = data.view; loaded.innerHTML = "Yes"; follow_goto(); } } }); } } // end else return true; // for not going at the beginning of the page... }) return true; // for not going at the beginning of the page... 
} </script><script type="text/javascript" src="https://pub.mdpi-res.com/assets/js/third-party/highcharts/highcharts.js?bdd06f45e34c33df?1732286508"></script><script type="text/javascript" src="https://pub.mdpi-res.com/assets/js/third-party/highcharts/modules/exporting.js?944dc938d06de3a8?1732286508"></script><script type="text/javascript" defer="defer"> var advancedStatsData; var selectedStatsType = "abstract"; $(function(){ var countWrapper = $('#counts-wrapper'); $('#author_stats_id #type_links a').on('click', function(e) { e.preventDefault(); selectedStatsType = $(this).data('type'); $('#article_advanced_stats').vectorMap('set', 'values', advancedStatsData[selectedStatsType]); $('#advanced_stats_max').html(advancedStatsData[selectedStatsType].max); $('#type_links a').removeClass('active'); $(this).addClass('active'); }); $.get('/1424-8220/23/18/7774/stats', function (result) { if (!result.success) { return; } // process article metrics part in left column var viewNumber = countWrapper.find(".view-number"); viewNumber.html(result.metrics.views); viewNumber.parent().toggleClass("count-div--grey", result.metrics.views == 0); var downloadNumber = countWrapper.find(".download-number"); downloadNumber.html(result.metrics.downloads); downloadNumber.parent().toggleClass("count-div--grey", result.metrics.downloads == 0); var citationsNumber = countWrapper.find(".citations-number"); citationsNumber.html(result.metrics.citations); citationsNumber.parent().toggleClass("count-div--grey", result.metrics.citations == 0); if (result.metrics.views > 0 || result.metrics.downloads > 0 || result.metrics.citations > 0) { countWrapper.find("#js-counts-wrapper__views, #js-counts-wrapper__downloads").addClass("visible").show(); if (result.metrics.citations > 0) { countWrapper.find('.citations-number').html(result.metrics.citations).show(); countWrapper.find("#js-counts-wrapper__citations").addClass("visible").show(); } else { countWrapper.find("#js-counts-wrapper__citations").remove(); } $("[data-id='article-counters']").removeClass("hidden"); } if (result.metrics.altmetrics_score > 0) { $("#js-altmetrics-donut").show(); } // process view chart in main column var jsondata = result.chart; var series = new Array(); $.each(jsondata.elements, function(i, element) { var dataValues = new Array(); $.each(element.values, function(i, value) { dataValues.push(new Array(value.tip, value.value)); }); series[i] = {name: element.text, data:dataValues}; }); Highcharts.setOptions({ chart: { style: { fontFamily: 'Arial,sans-serif' } } }); $('#article_stats_swf').highcharts({ chart: { type: 'line', width: $("#tabs").width() //* 0.91 }, credits: { enabled: false }, exporting: { enabled: true }, title: { text: jsondata.title.text, x: -20 //center }, xAxis: { categories: jsondata.x_axis.labels.labels, offset: jsondata.x_axis.offset, labels:{ step: jsondata.x_axis.labels.steps, rotation: 30 } }, yAxis: { max: jsondata.y_axis.max, min: jsondata.y_axis.min, offset: jsondata.y_axis.offset, labels: { steps: jsondata.y_axis.steps }, title: { enabled: false } }, tooltip: { formatter: function (){ return this.key.replace("#val#", this.y); } }, legend: { align: 'top', itemDistance: 50 }, series: series }); }); $('#supplement_link').click(function() { document.getElementById('suppl_id').scrollIntoView(); }); $('#stats_link').click(function() { document.getElementById('stats_id').scrollIntoView(); }); // open mol viewer for molbank special supplementary files $('.showJmol').click(function(e) { e.preventDefault(); var jmolModal = 
$("#jmolModal"); var url = "/article/1236406/jsmol_viewer/__supplementary_id__"; url = url.replace(/__supplementary_id__/g, $(this).data('index')); $('#jsmol-content').attr('src', url); jmolModal.find(".content").html($(this).data('description')); jmolModal.foundation("reveal", "open"); }); }); !function() { "use strict"; function e(e) { try { if ("undefined" == typeof console) return; "error"in console ? console.error(e) : console.log(e) } catch (e) {} } function t(e) { return d.innerHTML = '<a href="' + e.replace(/"/g, "&quot;") + '"></a>', d.childNodes[0].getAttribute("href") || "" } function n(n, c) { var o = ""; var k = parseInt(n.substr(c + 4, 2), 16); for (var i = c; i < n.length; i += 2) { if (i != c + 4) { var s = parseInt(n.substr(i, 2), 16) ^ k; o += String.fromCharCode(s); } } try { o = decodeURIComponent(escape(o)); } catch (error) { console.error(error); } return t(o); } function c(t) { for (var r = t.querySelectorAll("a"), c = 0; c < r.length; c++) try { var o = r[c] , a = o.href.indexOf(l); a > -1 && (o.href = "mailto:" + n(o.href, a + l.length)) } catch (i) { e(i) } } function o(t) { for (var r = t.querySelectorAll(u), c = 0; c < r.length; c++) try { var o = r[c] , a = o.parentNode , i = o.getAttribute(f); if (i) { var l = n(i, 0) , d = document.createTextNode(l); a.replaceChild(d, o) } } catch (h) { e(h) } } function a(t) { for (var r = t.querySelectorAll("template"), n = 0; n < r.length; n++) try { i(r[n].content) } catch (c) { e(c) } } function i(t) { try { c(t), o(t), a(t) } catch (r) { e(r) } } var l = "/cnd-cgi/l/email-protection#" , u = ".__cf_email__" , f = "data-cfemail" , d = document.createElement("div"); i(document), function() { var e = document.currentScript || document.scripts[document.scripts.length - 1]; e.parentNode.removeChild(e) }() }(); </script><script type="text/javascript"> function setCookie(cname, cvalue, ctime) { ctime = (typeof ctime === 'undefined') ? 
10*365*24*60*60*1000 : ctime; // default => 10 years var d = new Date(); d.setTime(d.getTime() + ctime); // ==> 1 hour = 60*60*1000 var expires = "expires="+d.toUTCString(); document.cookie = cname + "=" + cvalue + "; " + expires +"; path=/"; } function getCookie(cname) { var name = cname + "="; var ca = document.cookie.split(';'); for(var i=0; i<ca.length; i++) { var c = ca[i]; while (c.charAt(0)==' ') c = c.substring(1); if (c.indexOf(name) == 0) return c.substring(name.length, c.length); } return ""; } </script><script type="text/javascript" src="https://d1bxh8uas1mnw7.cloudfront.net/assets/embed.js"></script><script> $(document).ready(function() { if ($("#js-similarity-related-data").length > 0) { $.ajax({ url: '/article/1236406/similarity-related', success: function(response) { $("#js-similarity-related-data").html(response); $("#js-related-articles-menu").show(); $(document).foundation('tab', 'reflow'); MathJax.Hub.Queue(["Typeset", MathJax.Hub]); } }); } }); </script><link rel="stylesheet" href="https://pub.mdpi-res.com/assets/css/jquery-ui-1.10.4.custom.min.css?80647d88647bf347?1732286508"><link rel="stylesheet" href="https://pub.mdpi-res.com/assets/css/magnific-popup.min.css?04d343e036f8eecd?1732286508"><script type="text/javascript" src="https://pub.mdpi-res.com/assets/js/magnific-popup.min.js?2be3d9e7dc569146?1732286508"></script><script> $(function() { $(".js-show-more-academic-editors").on("click", function(e) { e.preventDefault(); $(this).hide(); $(".academic-editor-container").removeClass("hidden"); }); }); </script> <link rel="stylesheet" href="https://pub.mdpi-res.com/assets/css/vmap/jqvmap.min.css?126a06688aa11c13?1732286508"> <script src="https://pub.mdpi-res.com/assets/js/vmap/jquery.vmap.min.js?935f68d33bdd88a1?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/vmap/jquery.vmap.world.js?16677403c0e1bef1?1732286508"></script> <script> function updateSlick() { $('.multiple-items').slick('setPosition'); } $(document).ready(function() { $('.multiple-items').slick({ slidesToShow: 1, nextArrow: '<a class="slick-next" href="#"><i class="material-icons">chevron_right</i></a>', prevArrow: '<a class="slick-prev" href="#"><i class="material-icons">chevron_left</i></a>', slidesToScroll: 1, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 1, slidesToScroll: 1, } }, { breakpoint: 600, settings: { slidesToShow: 1, slidesToScroll: 1, } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1, } } ] }); $('.multiple-items').show(); $(document).on('click', '.reviewReportSelector', function(e) { let path = $(this).attr('data-path'); handleReviews(path, $(this)); }); $(document).on('click', '.viewReviewReports', function(e) { let versionOne = $('#versionTab_1'); if (!versionOne.hasClass('activeTab')) { let path = $(this).attr('data-path'); handleReviews(path, versionOne); } location.href = "#reviewReports"; }); $(document).on('click', '.reviewersResponse, .authorResponse', function(e) { let version = $(this).attr('data-version'); let targetVersion = $('#versionTab_' + version); if (!targetVersion.hasClass('activeTab')) { let path = targetVersion.attr('data-path'); handleReviews(path, targetVersion); } location.href = $(this).attr('data-link'); }); $(document).on('click', '.tab', function (e) { e.preventDefault(); $('.tab').removeClass('activeTab'); $(this).addClass('activeTab') $('.tab').each(function() { $(this).closest('.tab-title').removeClass('active'); }); $(this).closest('.tab-title').addClass('active') }); }); function handleReviews(path, 
target) { $.ajax({ url: path, context: this, success: function (data) { $('.activeTab').removeClass('activeTab'); target.addClass('activeTab'); $('#reviewSection').html(data.view); }, error: function (xhr, ajaxOptions, thrownError) { console.log(xhr.status); console.log(thrownError); } }); } </script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/affix.js?v1?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/storage.js?e9b262d3a3476d25?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/jquery-scrollspy.js?09cbaec0dbb35a67?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/magnific-popup.js?4a09c18460afb26c?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/underscore.js?f893e294cde60c24?1732286508"></script> <script type="text/javascript"> $('document').ready(function(){ $("#left-column").addClass("show-for-large-up"); $("#middle-column").removeClass("medium-9").removeClass("left-bordered").addClass("medium-12"); $(window).on('resize scroll', function() { /* if ($('.button--drop-down').isInViewport($(".top-bar").outerHeight())) { */ if ($('.button--drop-down').isInViewport()) { $("#js-button-download").hide(); } else { $("#js-button-download").show(); } }); }); $(document).on('DOMNodeInserted', function(e) { var element = $(e.target); if (element.hasClass('menu') && element.hasClass('html-nav') ) { element.addClass("side-menu-ul"); } }); </script> <script src="https://pub.mdpi-res.com/assets/js/xmltohtml/articles.js?5118449d9ad8913a?1732286508"></script> <script> repositionOpenSideBar = function() { $('#left-column').addClass("show-for-large-up show-for-medium-up").show(); $('#middle-column').removeClass('large-12').removeClass('medium-12'); $('#middle-column').addClass('large-9'); } repositionCloseSideBar = function() { $('#left-column').removeClass("show-for-large-up show-for-medium-up").hide(); $('#middle-column').removeClass('large-9'); $('#middle-column').addClass('large-12').addClass('medium-12'); } </script> <!--[if lt IE 9]> <script src="https://pub.mdpi-res.com/assets/js/ie8/ie8.js?6eef8fcbc831f5bd?1732286508"></script> <script src="https://pub.mdpi-res.com/assets/js/ie8/jquery.xdomainrequest.min.js?a945caca315782b0?1732286508"></script> <![endif]--> <!-- Twitter universal website tag code --> <script type="text/plain" data-cookieconsent="marketing"> !function(e,t,n,s,u,a){e.twq||(s=e.twq=function(){s.exe?s.exe.apply(s,arguments):s.queue.push(arguments); },s.version='1.1',s.queue=[],u=t.createElement(n),u.async=!0,u.src='//static.ads-twitter.com/uwt.js', a=t.getElementsByTagName(n)[0],a.parentNode.insertBefore(u,a))}(window,document,'script'); // Insert Twitter Pixel ID and Standard Event data below twq('init','o2pip'); twq('track','PageView'); </script> <!-- End Twitter universal website tag code --> <script>(function(){function c(){var b=a.contentDocument||a.contentWindow.document;if(b){var d=b.createElement('script');d.innerHTML="window.__CF$cv$params={r:'8e76b0e56ad1a3f5',t:'MTczMjQyMjkxMy4wMDAwMDA='};var a=document.createElement('script');a.nonce='';a.src='/cdn-cgi/challenge-platform/scripts/jsd/main.js';document.getElementsByTagName('head')[0].appendChild(a);";b.getElementsByTagName('head')[0].appendChild(d)}}if(document.body){var 
a=document.createElement('iframe');a.height=1;a.width=1;a.style.position='absolute';a.style.top=0;a.style.left=0;a.style.border='none';a.style.visibility='hidden';document.body.appendChild(a);if('loading'!==document.readyState)c();else if(window.addEventListener)document.addEventListener('DOMContentLoaded',c);else{var e=document.onreadystatechange||function(){};document.onreadystatechange=function(b){e(b);'loading'!==document.readyState&&(document.onreadystatechange=e,c())}}}})();</script></body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10