Multimodal deep learning models for early detection of Alzheimer’s disease stage

Janani Venugopalan (1), Li Tong (1), Hamid Reza Hassanzadeh (2) & May D. Wang (1,3,4)

(1) Department of Biomedical Engineering, Georgia Institute of Technology and Emory University, Atlanta, USA
(2) School of Computational Science and Engineering, Georgia Institute of Technology, Atlanta, USA
(3) School of Electrical and Computer Engineering, Georgia Institute of Technology, Atlanta, USA
(4) Winship Cancer Institute, Parker H. Petit Institute for Bioengineering and Biosciences, Institute of People and Technology, Georgia Institute of Technology and Emory University, Atlanta, USA

Corresponding author: May D. Wang (maywang@gatech.edu)

Scientific Reports, volume 11 (2021)
Published: 05 February 2021
DOI: https://doi.org/10.1038/s41598-020-74399-w
Open access (CC BY 4.0) | Subjects: Data integration, Data mining
Abstract

Most current Alzheimer’s disease (AD) and mild cognitive impairment (MCI) studies use a single data modality to make predictions such as AD stage. Fusing multiple data modalities can provide a more holistic view of AD staging. We therefore use deep learning (DL) to integrally analyze imaging (magnetic resonance imaging (MRI)), genetic (single nucleotide polymorphisms (SNPs)), and clinical test data to classify patients into AD, MCI, and controls (CN). We use stacked denoising auto-encoders to extract features from clinical and genetic data, and 3D convolutional neural networks (CNNs) for imaging data. We also develop a novel data interpretation method that identifies the top-performing features learned by the deep models through clustering and perturbation analysis. Using the Alzheimer’s Disease Neuroimaging Initiative (ADNI) dataset, we demonstrate that deep models outperform shallow models, including support vector machines, decision trees, random forests, and k-nearest neighbors. In addition, we demonstrate that integrating multi-modality data outperforms single-modality models in terms of accuracy, precision, recall, and mean F1 scores. Our models identified the hippocampus, the amygdala, and the Rey Auditory Verbal Learning Test (RAVLT) as the most discriminative features, consistent with the known AD literature.
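The abstract names three method components. As a concrete illustration of the first, here is a minimal PyTorch sketch of greedy layer-wise pre-training for a stacked denoising auto-encoder over tabular (clinical or SNP) inputs. The layer sizes, Gaussian noise level, and training loop are illustrative assumptions, not the paper's reported settings.

import torch
import torch.nn as nn

class DenoisingAutoEncoder(nn.Module):
    """One layer of the stack: reconstruct the clean input from a noisy copy."""
    def __init__(self, n_in, n_hidden, noise_std=0.1):
        super().__init__()
        self.noise_std = noise_std
        self.encoder = nn.Sequential(nn.Linear(n_in, n_hidden), nn.ReLU())
        self.decoder = nn.Linear(n_hidden, n_in)

    def forward(self, x):
        noisy = x + self.noise_std * torch.randn_like(x)  # corrupt the input
        return self.decoder(self.encoder(noisy))          # reconstruct the clean input

def pretrain_stack(x, layer_sizes, epochs=50, lr=1e-3):
    """Greedy layer-wise pre-training: each auto-encoder learns to
    reconstruct the codes produced by the previously trained layer."""
    encoders, current = [], x
    for n_hidden in layer_sizes:
        dae = DenoisingAutoEncoder(current.shape[1], n_hidden)
        opt = torch.optim.Adam(dae.parameters(), lr=lr)
        for _ in range(epochs):
            opt.zero_grad()
            loss = nn.functional.mse_loss(dae(current), current)
            loss.backward()
            opt.step()
        with torch.no_grad():
            current = dae.encoder(current)  # clean codes feed the next layer
        encoders.append(dae.encoder)
    return nn.Sequential(*encoders)  # stacked encoder = feature extractor

# Toy usage: 200 subjects x 50 clinical features -> 16-d learned features
encoder = pretrain_stack(torch.randn(200, 50), layer_sizes=[32, 16])
codes = encoder(torch.randn(5, 50))  # shape (5, 16)

In a multimodal setting, one natural way to fuse such per-modality codes is to concatenate them before a final classification layer.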
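For the imaging branch, a toy 3D convolutional network over a single-channel MRI volume that outputs AD/MCI/CN class logits. Channel counts, kernel sizes, and the 64x64x64 input are hypothetical placeholders, not the architecture used in the paper.

import torch
import torch.nn as nn

class MRI3DCNN(nn.Module):
    def __init__(self, n_classes=3):  # AD, MCI, CN
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv3d(1, 8, kernel_size=3, padding=1), nn.ReLU(),
            nn.MaxPool3d(2),
            nn.Conv3d(8, 16, kernel_size=3, padding=1), nn.ReLU(),
            nn.MaxPool3d(2),
            nn.AdaptiveAvgPool3d(4),  # -> (16, 4, 4, 4) for any input size
        )
        self.classifier = nn.Linear(16 * 4 * 4 * 4, n_classes)

    def forward(self, x):  # x: (batch, 1, depth, height, width)
        return self.classifier(self.features(x).flatten(1))

# Two fake 64x64x64 T1 volumes -> logits of shape (2, 3)
logits = MRI3DCNN()(torch.randn(2, 1, 64, 64, 64))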
Eng.; citation_title=Quantitative MRI brain studies in mild cognitive impairment and Alzheimer&#39;s disease: A methodological review; citation_author=S Leandrou, S Petroudi, CC Reyes-Aldasoro, PA Kyriacou, CS Pattichis; citation_volume=11; citation_publication_date=2018; citation_pages=97-111; citation_doi=10.1109/RBME.2018.2796598; citation_id=CR39"/> <meta name="citation_reference" content="Mhaskar, H., Liao, Q. &amp; Poggio, T. Learning functions: when is deep better than shallow. arXiv preprint arXiv:1603.00988 (2016)."/> <meta name="citation_reference" content="citation_journal_title=IEEE Trans. Med. Imaging; citation_title=Deep convolutional neural networks for computer-aided detection: CNN architectures, dataset characteristics and transfer learning; citation_author=H-C Shin; citation_volume=35; citation_issue=5; citation_publication_date=2016; citation_pages=1285-1298; citation_doi=10.1109/TMI.2016.2528162; citation_id=CR41"/> <meta name="citation_reference" content="citation_journal_title=BMC Med. Inform. Decis. Mak.; citation_title=Deep learning based feature-level integration of multi-omics data for breast cancer patients survival analysis; citation_author=L Tong, J Mitchel, K Chatlin, MD Wang; citation_volume=20; citation_publication_date=2020; citation_pages=1-12; citation_doi=10.1186/s12911-020-01225-8; citation_id=CR42"/> <meta name="citation_reference" content="citation_journal_title=Methods; citation_title=Integrating multi-omics data by learning modality invariant representations for improved prediction of overall survival of cancer; citation_author=L Tong, H Wu, MD Wang; citation_publication_date=2020; citation_doi=10.1016/j.ymeth.2020.07.008; citation_id=CR43"/> <meta name="citation_reference" content="Che, Z., Purushotham, S., Khemani, R. &amp; Liu, Y. Distilling knowledge from deep networks with applications to healthcare domain. arXiv preprint arXiv:1512.03542 (2015)."/> <meta name="citation_reference" content="citation_journal_title=Neurobiol. Aging; citation_title=Alzheimer&#39;s disease markers, hypertension, and gray matter damage in normal elderly; citation_author=L Glodzik; citation_volume=33; citation_publication_date=2012; citation_pages=1215-1227; citation_doi=10.1016/j.neurobiolaging.2011.02.012; citation_id=CR45"/> <meta name="citation_reference" content="citation_journal_title=Lancet Neurol.; citation_title=Advancing research diagnostic criteria for Alzheimer&#39;s disease: the IWG-2 criteria; citation_author=B Dubois; citation_volume=13; citation_publication_date=2014; citation_pages=614-629; citation_doi=10.1016/S1474-4422(14)70090-0; citation_id=CR46"/> <meta name="citation_reference" content="citation_journal_title=J. Neural Eng.; citation_title=Facilitation of memory encoding in primate hippocampus by a neuroprosthesis that promotes task-specific neural firing; citation_author=RE Hampson; citation_volume=10; citation_publication_date=2013; citation_pages=066013; citation_doi=10.1088/1741-2560/10/6/066013; citation_id=CR47"/> <meta name="citation_reference" content="citation_journal_title=NeuroImage; citation_title=A new SPM toolbox for combining probabilistic cytoarchitectonic maps and functional imaging data; citation_author=SB Eickhoff; citation_volume=25; citation_publication_date=2005; citation_pages=1325-1335; citation_doi=10.1016/j.neuroimage.2004.12.034; citation_id=CR48"/> <meta name="citation_reference" content="citation_journal_title=IEEE Trans. Pattern Anal. Mach. 
Intell.; citation_title=Feature selection based on mutual information criteria of max-dependency, max-relevance, and min-redundancy; citation_author=H Peng, F Long, C Ding; citation_volume=27; citation_publication_date=2005; citation_pages=1226-1238; citation_doi=10.1109/TPAMI.2005.159; citation_id=CR49"/> <meta name="citation_reference" content="citation_journal_title=J. Bioinform. Comput. Biol.; citation_title=Minimum redundancy feature selection from microarray gene expression data; citation_author=C Ding, H Peng; citation_volume=3; citation_publication_date=2005; citation_pages=185-205; citation_doi=10.1142/S0219720005001004; citation_id=CR50"/> <meta name="citation_reference" content="Ioffe, S. &amp; Szegedy, C. Batch normalization: Accelerating deep network training by reducing internal covariate shift. arXiv preprint arXiv:1502.03167 (2015)."/> <meta name="citation_reference" content="citation_journal_title=Sci. Rep.; citation_title=Deep patient: An unsupervised representation to predict the future of patients from the electronic health records; citation_author=R Miotto, L Li, BA Kidd, JT Dudley; citation_volume=6; citation_publication_date=2016; citation_pages=26094; citation_doi=10.1038/srep26094; citation_id=CR52"/> <meta name="citation_reference" content="Kingma, D. &amp; Ba, J. Adam: A method for stochastic optimization. arXiv preprint arXiv:1412.6980 (2014)."/> <meta name="citation_reference" content="citation_journal_title=Nat. Rev. Genet.; citation_title=Methods of integrating data to uncover genotype&#8211;phenotype interactions; citation_author=MD Ritchie, ER Holzinger, R Li, SA Pendergrass, D Kim; citation_volume=16; citation_publication_date=2015; citation_pages=85-97; citation_doi=10.1038/nrg3868; citation_id=CR54"/> <meta name="citation_author" content="Venugopalan, Janani"/> <meta name="citation_author_institution" content="Department of Biomedical Engineering, Georgia Institute of Technology and Emory University, Atlanta, USA"/> <meta name="citation_author" content="Tong, Li"/> <meta name="citation_author_institution" content="Department of Biomedical Engineering, Georgia Institute of Technology and Emory University, Atlanta, USA"/> <meta name="citation_author" content="Hassanzadeh, Hamid Reza"/> <meta name="citation_author_institution" content="School of Computational Science and Engineering, Georgia Institute of Technology, Atlanta, USA"/> <meta name="citation_author" content="Wang, May D."/> <meta name="citation_author_institution" content="Department of Biomedical Engineering, Georgia Institute of Technology and Emory University, Atlanta, USA"/> <meta name="citation_author_institution" content="School of Electrical and Computer Engineering, Georgia Institute of Technology, Atlanta, USA"/> <meta name="citation_author_institution" content="Winship Cancer Institute, Parker H. 
Article | Open access | Published: 05 February 2021

Multimodal deep learning models for early detection of Alzheimer's disease stage

Janani Venugopalan (1) (ORCID: 0000-0002-9049-6024), Li Tong (1), Hamid Reza Hassanzadeh (2) & May D. Wang (1,3,4)

Scientific Reports, volume 11, Article number: 3254 (2021)

Subjects: Data integration, Data mining

1. Department of Biomedical Engineering, Georgia Institute of Technology and Emory University, Atlanta, USA
2. School of Computational Science and Engineering, Georgia Institute of Technology, Atlanta, USA
3. School of Electrical and Computer Engineering, Georgia Institute of Technology, Atlanta, USA
4. Winship Cancer Institute, Parker H. Petit Institute for Bioengineering and Biosciences, Institute of People and Technology, Georgia Institute of Technology and Emory University, Atlanta, USA

Abstract

Most current Alzheimer's disease (AD) and mild cognitive impairment (MCI) studies use a single data modality to make predictions such as AD stages. The fusion of multiple data modalities can provide a holistic view of AD staging analysis. Thus, we use deep learning (DL) to integrally analyze imaging (magnetic resonance imaging (MRI)), genetic (single nucleotide polymorphisms (SNPs)), and clinical test data to classify patients into AD, MCI, and controls (CN).
We use stacked denoising auto-encoders to extract features from the clinical and genetic data, and 3D convolutional neural networks (CNNs) for the imaging data. We also develop a novel data-interpretation method that identifies the top-performing features learned by the deep models through clustering and perturbation analysis. Using the Alzheimer's Disease Neuroimaging Initiative (ADNI) dataset, we demonstrate that deep models outperform shallow models, including support vector machines, decision trees, random forests, and k-nearest neighbors, and that integrating multi-modality data outperforms single-modality models in terms of accuracy, precision, recall, and meanF1 scores. Our models identify the hippocampus, the amygdala, and the Rey Auditory Verbal Learning Test (RAVLT) as top distinguishing features, consistent with the known AD literature.
Deep learning (DL) has shown tremendous potential for clinical decision support for a variety of diseases, including diabetic retinopathy [1,2], cancers [3,4], and Alzheimer's disease (for imaging analysis) [5-7]. The major strength of DL over shallow learning models is its ability to learn the most predictive features directly from raw data, given a dataset of labeled examples. DL has shown improvement over shallow learning for single data modalities such as images [8,9], electronic health records (EHRs) [10], and SNPs [11]. DL techniques also facilitate training and prediction in the presence of partial data [12]. In this study, we develop a novel DL architecture for clinical decision support that predicts the Alzheimer's disease (AD) stage using multi-modality data (images, clinical data, and genetic information).

AD is the most common neurodegenerative disorder and the sixth leading cause of death in the United States [13,14]. The worldwide disease burden of AD is projected to reach $2 trillion by 2030 [15], which necessitates early detection. Despite extensive research and advances in clinical practice, fewer than 50% of AD patients are diagnosed accurately for their pathology and disease progression based on their clinical symptoms [13]. The most conclusive evidence for AD is the presence of amyloid plaques and neurofibrillary tangles in histopathology; however, the early onset of AD is correlated not with the presence of plaques but with synaptic and neuronal loss [16].

Research on data and data-mining strategies from the AD initiative [17-19] is ongoing to improve our understanding of the underlying disease processes. AD biomarkers, including clinical symptoms [20] (such as dementia and memory loss) and neurological tests and scores such as MMSE scores, are augmented with imaging, genetic, and protein biomarkers [21-26].
Most of these studies identify biomarkers using single-modality data, which restricts a holistic assessment of AD disease progression. There have been AD multi-modal analyses that combine various imaging modalities [27-32], such as structural MRI (T1-weighted, T2-weighted), fMRI, and positron emission tomography (PET) [33,34], as well as imaging genetics [35]. In addition, genetics has been used with clinical data to augment data labels and phenotypes. Besides shallow learners, DL models such as auto-encoders [8] and deep-belief networks [36] (Supplementary Table A1) have been used for PET and MRI image data fusion with improved prediction.

In this study, we further multi-modal AD data fusion to advance AD stage prediction by using DL to combine imaging, EHR, and genomic SNP data for the classification of patients into control (CN), MCI, and AD groups. We use stacked denoising auto-encoders for the EHR and SNP data, and novel 3D convolutional neural networks (CNNs) for the MRI imaging data. After the networks are separately trained for each data modality, we combine them using different classification layers, including decision trees, random forests, support vector machines (SVMs), and k-nearest neighbors (kNN). We demonstrate the performance of our integration models using the ADNI [37] dataset, which contains SNP (808 patients), MRI imaging (503 patients), and clinical and neurological test data (2004 patients).
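To make this integration strategy concrete, the following is a minimal sketch of the pipeline just described: one denoising auto-encoder per tabular modality, a 3D CNN over the MRI volumes, and a shallow fusion classifier fit on the concatenated learned features. The framework (PyTorch plus scikit-learn), all layer sizes, and the masking-noise rate are illustrative assumptions, not the authors' published implementation or hyper-parameters.

```python
# A minimal sketch of the fusion pipeline described above (assumed framework:
# PyTorch + scikit-learn; layer sizes and the noise rate are illustrative).
import torch
import torch.nn as nn
from sklearn.ensemble import RandomForestClassifier

class DenoisingAE(nn.Module):
    """Denoising auto-encoder for one tabular modality (EHR or SNP)."""
    def __init__(self, n_in, n_hidden=100, noise=0.2):
        super().__init__()
        self.noise = noise
        self.encoder = nn.Sequential(nn.Linear(n_in, n_hidden), nn.ReLU())
        self.decoder = nn.Linear(n_hidden, n_in)

    def forward(self, x):
        corrupted = x * (torch.rand_like(x) > self.noise)  # masking noise
        hidden = self.encoder(corrupted)                   # learned features
        return self.decoder(hidden), hidden                # reconstruction, code

class MriCnn3d(nn.Module):
    """3D CNN feature extractor for the 18 x 22 x 23-voxel MRI volumes."""
    def __init__(self, n_out=100):
        super().__init__()
        self.conv = nn.Sequential(
            nn.Conv3d(1, 8, kernel_size=3, padding=1), nn.ReLU(),
            nn.MaxPool3d(2),              # (8, 18, 22, 23) -> (8, 9, 11, 11)
        )
        self.fc = nn.Linear(8 * 9 * 11 * 11, n_out)

    def forward(self, volume):            # volume: (batch, 1, 18, 22, 23)
        return self.fc(self.conv(volume).flatten(1))

def fuse_and_classify(h_ehr, h_snp, h_mri, labels):
    """Concatenate per-modality codes and fit a shallow fusion classifier."""
    features = torch.cat([h_ehr, h_snp, h_mri], dim=1).detach().numpy()
    return RandomForestClassifier(random_state=0).fit(features, labels)
```

Per the study design, each network would first be trained on its own modality (the auto-encoders on a reconstruction loss, the CNN on the stage labels) before the fusion head is fit on the concatenated codes.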
We address this challenge by developing a novel perturbation- and clustering-based approach for finding the top features contributing to the decision.</p></div><div class="c-article-section__content"><p>In this article, we report the major contributions for AD stage prediction as follows:</p><ul class="u-list-style-bullet"> <li> <p>Novel DL architectures outperform shallow learning models;</p> </li> <li> <p>Multi-modality data analysis with DL outperforms single-modality DL models; and</p> </li> <li> <p>Novel interpretable DL methods are capable of extracting top-performing features.</p> </li> </ul></div><section data-title="Data description"><div class="c-article-section" id="Sec2-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec2">Data description</h2><div class="c-article-section__content" id="Sec2-content"><p>This article uses the Alzheimer’s Disease Neuroimaging Initiative (ADNI) database (adni.loni.usc.edu)<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 37" title="Mueller, S. G. et al. Ways toward an early diagnosis in Alzheimer’s disease: the Alzheimer’s Disease Neuroimaging Initiative (ADNI). Alzheimer’s Dement. 1, 55–66 (2005)." href="/articles/s41598-020-74399-w#ref-CR37" id="ref-link-section-d105447676e613">37</a></sup> for the analysis. ADNI aims to test whether serial MRI, PET, biological markers, and clinical and neuropsychological assessments can be combined to measure the progression of MCI and early AD. The ADNI data repository contains imaging, clinical, and genetic data for over 2220 patients across four studies (ADNI1, ADNI2, ADNI GO, and ADNI3). Our study focuses on ADNI1, ADNI2, and ADNI GO because ADNI3 is an ongoing study expected to end in 2022; its data is being released in phases, with limited availability of unprocessed imaging data and no genetic data yet. The imaging data (ADNI1, 2 and GO) consists of MRI and PET images, of which we use cross-sectional MRI data corresponding to the baseline screenings from ADNI1 (503 patients). The data publisher has standardized the images to eliminate the non-linearities caused by scanners from different vendors. In this study, we use the cross-sectional MRI data, consisting of 9108 voxels per patient distributed over 18 slices, with each slice having 22 × 23 voxels. For clinical (EHR) data, we use data from 2004 patients (ADNI1, ADNI2, and ADNI GO), covering clinical tests (e.g., memory tests, balance tests, and cognitive tests), medication data (e.g., usage of levodopa), imaging score summaries (e.g., levels of fluorodeoxyglucose (FDG) from PET, brain volumes from MRI), patient demographics (e.g., age and gender), and biochemical tests. The genetic data consists of whole genome sequencing (WGS) data from 808 ADNI participants (at the time of sequencing, 128 with AD, 415 with MCI, and 267 controls), sequenced by Illumina’s non-Clinical Laboratory Improvement Amendments (non-CLIA) laboratory at roughly 30–40× coverage in 2012 and 2013. The resulting variant call files (VCFs) were generated by ADNI in 2014 using Broad best practices (Burrows-Wheeler Aligner (BWA) and Genome Analysis Toolkit (GATK) haplotype caller). We use a total of 2004 patients in this study: all 2004 have clinical data, 503 have imaging data, and 808 have genetic data (Fig.
<a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig1">1</a>). For participants with multiple visits, we use the diagnosis from patient’s last visit. As shown in Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig1">1</a>c, 220 patients have all three data modalities, 588 patients have SNP and EHR, 283 patients have imaging and EHR, the remaining patients have only EHR data.</p><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-1" data-title="Figure 1"><figure><figcaption><b id="Fig1" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 1</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/1" rel="nofollow"><picture><source type="image/webp" srcset="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig1_HTML.png?as=webp"><img aria-describedby="Fig1" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig1_HTML.png" alt="figure 1" loading="lazy" width="685" height="814"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-1-desc"><p>(<b>a</b>) Description of ADNI data. Clinical data consists of demographics, neurological exams and assessments, medications, imaging volumes, and biomarkers. (<b>b</b>) Number of patients by modality and disease stage. (<i>CN</i> controls, <i>MCI</i> mild cognitive disorder, and <i>AD</i> Alzheimer’s disease). 220 patients have all the three data modalities, 588 patients have SNP and EHR, 283 patients have imaging and EHR, the remaining patients have only EHR data.</p></div></div><div class="u-text-right u-hide-print"><a class="c-article__pill-button" data-test="article-link" data-track="click" data-track-label="button" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/1" data-track-dest="link:Figure1 Full size image" aria-label="Full size image figure 1" rel="nofollow"><span>Full size image</span><svg width="16" height="16" focusable="false" role="img" aria-hidden="true" class="u-icon"><use xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="#icon-eds-i-chevron-right-small"></use></svg></a></div></figure></div></div></div></section><section data-title="Study design for novel DL and multi-modality data analysis"><div class="c-article-section" id="Sec3-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec3">Study design for novel DL and multi-modality data analysis</h2><div class="c-article-section__content" id="Sec3-content"><p>As mentioned above, we use data from imaging (503 MRI images), SNP (808 patients) and the EHR (2004 patients) to predict AD stages. For each single data modality, we first demonstrate the superiority of deep models over shallow models such as kNN, one-vs-one coding SVM, random forests, and decision trees. The SNP and EHR features for shallow models and DL are the same. 
For imaging, when using DL, we apply multi-slice 3D voxels directly, while for shallow learners, we extract expert-crafted features derived from the 3D voxels.</p><p>Regarding AD staging, only the EHR data covers all three stage classes: CN, MCI, and AD. SNP profiles do not vary between MCI and AD<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 38" title="Shen, L. et al. Genetic analysis of quantitative phenotypes in AD and MCI: Imaging, cognition and biomarkers. Brain Imaging Behav. 8, 183–207 (2014)." href="/articles/s41598-020-74399-w#ref-CR38" id="ref-link-section-d105447676e667">38</a></sup>, so SNP data only supports CN vs. AD/MCI prediction. On images, brains of patients with early MCI are structurally similar to those of CN subjects, and brains of patients with late MCI are structurally similar to those of AD patients. As such, for imaging, only CN and AD (as seen in Ref.<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 39" title="Leandrou, S., Petroudi, S., Reyes-Aldasoro, C. C., Kyriacou, P. A. &amp; Pattichis, C. S. Quantitative MRI brain studies in mild cognitive impairment and Alzheimer’s disease: A methodological review. IEEE Rev. Biomed. Eng. 11, 97–111 (2018)." href="/articles/s41598-020-74399-w#ref-CR39" id="ref-link-section-d105447676e671">39</a></sup>) are used for staging assessment. Thus, combining all three modalities can help us significantly improve AD staging prediction accuracy. As shown in Figs. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig2">2</a> and <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig3">3</a>, we have developed three data fusion strategies: (i) Feature-level combinations using shallow models, (ii) Intermediate-feature-level combinations using deep models, and (iii) Decision-level combinations using shallow models.</p><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-2" data-title="Figure 2"><figure><figcaption><b id="Fig2" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 2</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/2" rel="nofollow"><picture><source type="image/webp" srcset="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig2_HTML.png?as=webp"><img aria-describedby="Fig2" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig2_HTML.png" alt="figure 2" loading="lazy" width="685" height="228"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-2-desc"><p>Deep model for data integration compared with shallow models of data integration. (<b>a</b>) Feature-level integration with shallow models, where the features are concatenated before passing into shallow models. (<b>b</b>) Deep intermediate-feature-level integration, where the original features are transformed separately using deep models prior to integration and prediction.
(<b>c</b>) Decision-level integration, where voting is performed over the decisions of individual classifiers. In this study, we compare the performance of deep intermediate-feature-level integration against shallow feature-level and decision-level integrations for the prediction of Alzheimer’s stages.</p></div></div></figure></div><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-3" data-title="Figure 3"><figure><figcaption><b id="Fig3" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 3</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/3" rel="nofollow"><picture><source type="image/webp" srcset="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig3_HTML.png?as=webp"><img aria-describedby="Fig3" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig3_HTML.png" alt="figure 3" loading="lazy" width="685" height="558"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-3-desc"><p>Intermediate-feature-level combination deep models for multimodality data integration for clinical decision support. Data from diverse sources (imaging, EHR, and SNP) are combined using novel deep architectures. 3D convolutional neural network architectures are used on 3D MR image regions to obtain intermediate imaging features. Deep stacked denoising autoencoders are used to obtain intermediate EHR features. Deep stacked denoising autoencoders are used to obtain intermediate SNP features. The three types of intermediate features are passed into a classification layer for classification into Alzheimer’s stages (CN, MCI, and AD).</p></div></div></figure></div><p>Feature-level combinations are performed through direct concatenation of the data modalities using shallow learners (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig2">2</a>).
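</p><p>A minimal sketch of this feature-level baseline (ours, with placeholder arrays; the EHR and SNP feature counts follow the description in this article, while the expert-crafted imaging feature count is an illustrative assumption):</p><pre><code class="language-python"># Feature-level integration (Fig. 2a): concatenate modality features directly
# and train a shallow learner on the joined matrix.
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)
n = 220                                      # patients with all three modalities
ehr = rng.random((n, 1680))                  # normalized EHR features
snp = rng.random((n, 500))                   # selected SNP features
img = rng.random((n, 64))                    # illustrative imaging feature count
y = rng.integers(0, 3, size=n)               # CN / MCI / AD

X = np.concatenate([ehr, snp, img], axis=1)  # raw feature-level fusion
print(cross_val_score(RandomForestClassifier(), X, y, cv=10).mean())
</code></pre><p>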
The intermediate-feature-level combinations are performed by extracting intermediate features using DL, followed by concatenation and a classification layer (more details are provided in Methods and the supplement). Decision-level combinations are performed by voting over the single-modality predictions. We test shallow models such as kNN, one-vs-one coding SVM, random forests, and decision trees for decision-level combinations and present the best-performing model. For the intermediate-feature-level models (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig3">3</a>), we evaluate four combinations: (i) EHR + imaging + SNP, (ii) EHR + imaging, (iii) EHR + SNP, and (iv) imaging + SNP. For all combinations except imaging + SNP, we perform three-stage classification (CN, AD, and MCI). For imaging + SNP, we perform classification into AD vs. CN.</p><p>All the above-mentioned cases are evaluated using an internal cross-validation and an external test set. We first remove 10% of the data as an external test set. On the remaining 90%, we perform tenfold cross-validation, with 81% of the total data being used for training and 9% for internal cross-validation. The internal cross-validation data set is used to optimize the model.</p></div></div></section><section data-title="Results for novel DL and multi-modality data analysis"><div class="c-article-section" id="Sec4-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec4">Results for novel DL and multi-modality data analysis</h2><div class="c-article-section__content" id="Sec4-content"><p>We report the ADNI results for both the internal cross-validation partition and the external test dataset. For each DL model and baseline shallow model, we use mean accuracy, precision, recall, and meanF1 scores as metrics to show the superiority of deep models for single modalities and the improvements gained from data integration.</p><h3 class="c-article__sub-heading" id="Sec5">3D convolutional neural network (DL) is superior to shallow models on imaging MRI data</h3><p>One patient’s imaging data consists of 3D volumes of 22 × 23 × 18 voxels (9108 voxels), one for each of the five selected brain areas.</p><p>In the DL model, the first-level fully connected layers contain 5 × 20 = 100 nodes in total, and the second-level fully connected layer contains 20 nodes. The results (Fig.
<a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig4">4</a>a) indicate that the CNN based imaging models outperform shallow models and give the best precision and meanF1 scores.</p><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-4" data-title="Figure 4"><figure><figcaption><b id="Fig4" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 4</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/4" rel="nofollow"><picture><source type="image/webp" srcset="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig4_HTML.png?as=webp"><img aria-describedby="Fig4" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig4_HTML.png" alt="figure 4" loading="lazy" width="685" height="701"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-4-desc"><p>Internal cross validation results for individual data modality to predict Alzheimer’s stage (<b>a</b>) Imaging results: deep learning prediction performs better than shallow learning predictions (<b>b</b>) EHR results: deep learning outperforms shallow models kNN and SVM and is comparable to decision trees and random forests (<b>c</b>) SNP results: deep learning outperforms shallow models. The kNN, SVM, RF and decision trees are shallow models. (<i>kNN</i> k-Nearest Neighbors, <i>SVM</i> support vector machines, and <i>RF</i> random forests).</p></div></div><div class="u-text-right u-hide-print"><a class="c-article__pill-button" data-test="article-link" data-track="click" data-track-label="button" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/4" data-track-dest="link:Figure4 Full size image" aria-label="Full size image figure 4" rel="nofollow"><span>Full size image</span><svg width="16" height="16" focusable="false" role="img" aria-hidden="true" class="u-icon"><use xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="#icon-eds-i-chevron-right-small"></use></svg></a></div></figure></div><h3 class="c-article__sub-heading" id="Sec6">Deep autoencoder model is comparable to shallow models on EHR data</h3><p>EHR data consists of 2004 patients with 1680 normalized features per patient, which we use to classify the patients into AD, MCI, and CN (three class). We use a three-layer auto-encoder with 200, 100 and 50 nodes each. The deep networks are trained using Adam with a max epoch count (repetition of DL network training on the entire dataset to allow adequate training) of 25. After hyperparameter optimization, the regularization coefficients for initial training is fixed at 0.03 and those for fine tuning at 0.03. The dropout probability is set to 0.6 for all the layers. The results (Fig. 
<a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig4">4</a>b) indicate that the autoencoders outperform shallow models such as kNN and SVM, and they are comparable to decision trees and random forests.</p><h3 class="c-article__sub-heading" id="Sec7">Deep autoencoder model is superior to shallow models for SNP data</h3><p>Processed SNP data consists of 808 patients with 500 features (each with levels 1, 2, 3), which we use to classify the patients into AD/MCI vs CN (two class). The auto-encoder network consists of three hidden layers with 200, 100 and 50 nodes each. Using Adam optimization and a max epoch count of 30, the best performing models have regularization coefficients for initial training as 0.03 and those for fine tuning at 0.06. The corruption (dropouts) is 0.6 for each layer. The results (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig4">4</a>c) indicate that the auto-encoder models outperform all the baselines models.</p><h3 class="c-article__sub-heading" id="Sec8">Results for multi-modality classification</h3><p>The intermediate features generated from the single-modality deep-models are concatenated and passed to an additional classification layer for integration.</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec9">Combination of all 3 modalities: (imaging + EHR + SNP): deep model outperforms shallow models</h4><p>When a particular modality is not available, we mask it as zeros when using DL. The intermediate features from the three modalities are passed to the classification layer. We test kNN, decision trees, random forests, and support vectors machines as alternatives for the classification layer. Internal cross-validation (CV) accuracy (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig5">5</a>a) using deep models followed by random forests as the classification layer are the best. Deep models for the combination of the three modalities outperform single-modalities DL. 
In addition, for this combination the deep model outperforms shallow feature-level and decision-level combination models on both the CV and external test sets (Table <a data-track="click" data-track-label="link" data-track-action="table anchor" href="/articles/s41598-020-74399-w#Tab1">1</a>).</p><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-5" data-title="Figure 5"><figure><figcaption><b id="Fig5" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 5</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/5" rel="nofollow"><picture><source type="image/webp" srcset="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig5_HTML.png?as=webp"><img aria-describedby="Fig5" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig5_HTML.png" alt="figure 5" loading="lazy" width="685" height="827"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-5-desc"><p>Internal cross-validation results for integration of data modalities to predict Alzheimer’s stage. (<b>a</b>) Imaging + EHR + SNP: deep learning performs better than shallow learning. (<b>b</b>) EHR + SNP: deep learning performs better than shallow learning. (<b>c</b>) Imaging + EHR: deep learning performs better than shallow learning. (<b>d</b>) Imaging + SNP results: shallow learning gave a better prediction than deep learning due to small sample sizes.
(<i>kNN</i> k-Nearest Neighbors, <i>SVM</i> support vector machines, <i>RF</i> random forests, <i>SM</i> shallow models, and <i>DL</i> deep learning).</p></div></div></figure></div><div class="c-article-table" data-test="inline-table" data-container-section="table" id="table-1"><figure><figcaption class="c-article-table__figcaption"><b id="Tab1" data-test="table-caption">Table 1 Feature extraction from deep models and comparison of internal validation results with external test results.</b></figcaption><div class="u-text-right u-hide-print"><a class="c-article__pill-button" data-test="table-link" data-track="click" data-track-action="view table" data-track-label="button" rel="nofollow" href="/articles/s41598-020-74399-w/tables/1" aria-label="Full size table 1"><span>Full size table</span></a></div></figure></div><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec10">Combination of SNP and EHR modalities: deep model outperforms shallow models</h4><p>Internal CV accuracy (0.78 ± 0) is best using deep models followed by random forests as the classification layer (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig5">5</a>b). The deep models for EHR + SNP combinations outperform single-modality DL models. For this combination, the deep model outperforms shallow feature-level combination models on both the CV and external test sets (Table <a data-track="click" data-track-label="link" data-track-action="table anchor" href="/articles/s41598-020-74399-w#Tab1">1</a>).</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec11">Combination of imaging and EHR modalities: deep model outperforms shallow models</h4><p>Internal CV accuracy (0.79 ± 0) is best using deep models followed by random forests and SVM as the classification layers (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig5">5</a>c). The deep models for EHR + imaging combinations outperform single-modality DL models. In addition, for this combination the DL model outperforms shallow feature-level and decision-level combination models on both the CV and external test sets (Table <a data-track="click" data-track-label="link" data-track-action="table anchor" href="/articles/s41598-020-74399-w#Tab1">1</a>). Random forests as the classification layer give the best performance on the external set.</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec12">Combination of imaging and SNP modalities: shallow model outperforms deep models</h4><p>We perform two-class classification using a combination of SNP and imaging intermediate features (CN vs. AD/MCI).
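</p><p>All internal CV figures quoted in this section follow the split protocol described earlier (10% external test set, then tenfold cross-validation on the remaining 90%); a minimal sketch of that protocol (ours, with placeholder data):</p><pre><code class="language-python"># Evaluation protocol sketch: hold out 10% as an external test set, then run
# tenfold CV on the remaining 90% (each fold trains on 81% of all data and
# validates on 9%); the external set is scored once at the end.
import numpy as np
from sklearn.model_selection import StratifiedKFold, train_test_split

rng = np.random.default_rng(0)
X = rng.random((808, 550))                 # placeholder data
y = rng.integers(0, 2, size=808)

X_dev, X_ext, y_dev, y_ext = train_test_split(
    X, y, test_size=0.10, stratify=y, random_state=0)
for train_idx, val_idx in StratifiedKFold(n_splits=10).split(X_dev, y_dev):
    pass  # fit on X_dev[train_idx], tune hyperparameters on X_dev[val_idx]
# finally: refit the chosen model and report metrics on (X_ext, y_ext)
</code></pre><p>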
Internal CV accuracy (0.75 ± 0.11) is best using feature-level combination models (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig5">5</a>d). However, the results on the external data are poor. The poor external validation can be attributed to having only 220 patients with both modalities of data.</p></div></div></section><section data-title="Discussion for novel DL and multi-modality data analysis"><div class="c-article-section" id="Sec13-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec13">Discussion for novel DL and multi-modality data analysis</h2><div class="c-article-section__content" id="Sec13-content"><p>Our results suggest that deep models outperform traditional shallow models for single modalities. Shallow models typically require features handcrafted by experts. In contrast, deep models can find the optimal set of features during training. In addition, deep models such as auto-encoders and CNNs can be used to perform unsupervised feature generation, and can then be combined with a more sophisticated decision layer. This architecture enables the modeling of complex decision boundaries for multiclass classification problems<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 40" title="Mhaskar, H., Liao, Q. &amp; Poggio, T. Learning functions: when is deep better than shallow. arXiv preprint arXiv:1603.00988 (2016)." href="/articles/s41598-020-74399-w#ref-CR40" id="ref-link-section-d105447676e1374">40</a></sup>. Due to this property, deep models are particularly effective for the identification of MCI, which has been a clinical challenge in Alzheimer’s research due to small differences between the three groups. Because shallow models (except random forests) do not tolerate noisy data, missing data, or missing modalities well, DL gives the best single-modality performance on noisy data.</p><p>The integration of multiple modalities improves the prediction accuracy (in three of four scenarios). The deep models for integration also show improved performance over traditional feature-level and decision-level integrations. DL’s superior performance is due to its ability to extract relationships amongst features from different modalities. When the dataset is very small (e.g., the combination of imaging and SNP), deep models do not perform well. The degraded performance could be caused by the lack of training data for the networks. Overall, our investigations show that:</p><ul class="u-list-style-bullet"> <li> <p>For single-modality data (clinical and imaging), the performances of DL models are always better than those of shallow models; and</p> </li> <li> <p>When using DL models, predictions using multi-modality data are better than those using single-modality data. The three best fusion setups are: EHR + SNP, EHR + Imaging + SNP, and EHR + Imaging.</p> </li> </ul><p>One bottleneck for our proposed DL-based data integration model is the small sample size of the ADNI dataset. To mitigate the small sample size challenge, we can utilize strategies such as transfer learning and domain adaptation<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 41" title="Shin, H.-C. et al.
Deep convolutional neural networks for computer-aided detection: CNN architectures, dataset characteristics and transfer learning. IEEE Trans. Med. Imaging 35(5), 1285–1298 (2016)." href="/articles/s41598-020-74399-w#ref-CR41" id="ref-link-section-d105447676e1398">41</a></sup>. For each data modality, we can adopt neural networks pre-trained on other similar datasets (e.g., a CNN-based MRI/CT brain imaging classification model trained for other conditions). By composing our model with these pre-trained networks and their parameters, we can perform domain adaptation or fine-tune the network parameters using our labeled ADNI data. On the other hand, we can also perform unsupervised feature representation learning for each data modality using publicly available data (e.g., The Cancer Genome Atlas (TCGA) dataset for SNPs). Our feature extraction step is performed independently for each modality in the current DL model, which is not trained end-to-end with the integration and classification step. One future direction is to enable end-to-end training and combine auto-encoders with other integration strategies besides feature concatenation<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 42" title="Tong, L., Mitchel, J., Chatlin, K. &amp; Wang, M. D. Deep learning based feature-level integration of multi-omics data for breast cancer patients survival analysis. BMC Med. Inform. Decis. Mak. 20, 1–12 (2020)." href="/articles/s41598-020-74399-w#ref-CR42" id="ref-link-section-d105447676e1402">42</a>,<a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 43" title="Tong, L., Wu, H. &amp; Wang, M. D. Integrating multi-omics data by learning modality invariant representations for improved prediction of overall survival of cancer. Methods. &#xA; https://doi.org/10.1016/j.ymeth.2020.07.008&#xA; &#xA; (2020)." href="/articles/s41598-020-74399-w#ref-CR43" id="ref-link-section-d105447676e1405">43</a></sup>.</p></div></div></section><section data-title="Study design of novel feature extraction to assist in DL model interpretation"><div class="c-article-section" id="Sec14-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec14">Study design of novel feature extraction to assist in DL model interpretation</h2><div class="c-article-section__content" id="Sec14-content"><p>Model interpretation is a major challenge for DL and is often considered a barrier to real-world biomedical applications. Research has shown that the weights of deep models affect the results through several layers of combinations, and hence do not yield clinically meaningful interpretations<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 44" title="Che, Z., Purushotham, S., Khemani, R. &amp; Liu, Y. Distilling knowledge from deep networks with applications to healthcare domain. arXiv preprint arXiv:1512.03542 (2015)." href="/articles/s41598-020-74399-w#ref-CR44" id="ref-link-section-d105447676e1417">44</a></sup>. In this study, we develop a novel interpretation method where we mask one feature at a time and measure the drop in accuracy (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig6">6</a>).
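</p><p>A minimal sketch of this masking procedure (ours; it assumes a trained scikit-learn-style classifier <code>model</code> and held-out arrays <code>X</code> and <code>y</code>):</p><pre><code class="language-python"># Perturbation-based interpretation sketch: mask one feature at a time,
# re-score the trained model, and record the drop in accuracy per feature.
import numpy as np
from sklearn.metrics import accuracy_score

def accuracy_drops(model, X, y, mask_value=0.0):
    base = accuracy_score(y, model.predict(X))
    drops = np.empty(X.shape[1])
    for j in range(X.shape[1]):
        X_masked = X.copy()
        X_masked[:, j] = mask_value          # mask feature j
        drops[j] = base - accuracy_score(y, model.predict(X_masked))
    return drops

# ranking: np.argsort(accuracy_drops(model, X, y))[::-1] puts the feature
# with the largest accuracy drop first
</code></pre><p>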
The features that give the maximum drop in accuracy are ranked higher for feature extraction.</p><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-6" data-title="Figure 6"><figure><figcaption><b id="Fig6" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 6</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/6" rel="nofollow"><picture><source type="image/webp" srcset="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig6_HTML.png?as=webp"><img aria-describedby="Fig6" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig6_HTML.png" alt="figure 6" loading="lazy" width="685" height="375"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-6-desc"><p>Feature extraction for deep model interpretation. Novel feature interpretation methodology where features are masked one at a time and the effect on the classification is observed. The feature that gives the highest drop in accuracy is ranked the highest. Once the features are ranked, we check whether the intermediate features pick up associations different from the raw data using cluster analysis. Deep models show associations that differ from those of shallow models, which accounts for their superior performance.</p></div></div></figure></div></div></div></section><section data-title="Results and discussion of novel feature extraction to assist in DL model interpretation"><div class="c-article-section" id="Sec15-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec15">Results and discussion of novel feature extraction to assist in DL model interpretation</h2><div class="c-article-section__content" id="Sec15-content"><p>The top EHR features (Table <a data-track="click" data-track-label="link" data-track-action="table anchor" href="/articles/s41598-020-74399-w#Tab1">1</a>) include memory tests, imaging summary scores, and brain volumes. Changes to memory and brain volumes have been reported as AD biomarkers. Imaging markers such as involvement of limbic and cortical regions<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 45" title="Glodzik, L. et al. Alzheimer’s disease markers, hypertension, and gray matter damage in normal elderly. Neurobiol. Aging 33, 1215–1227 (2012)."
href="/articles/s41598-020-74399-w#ref-CR45" id="ref-link-section-d105447676e1453">45</a></sup>, and changes in hippocampus volume and structure<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 46" title="Dubois, B. et al. Advancing research diagnostic criteria for Alzheimer’s disease: the IWG-2 criteria. Lancet Neurol. 13, 614–629 (2014)." href="/articles/s41598-020-74399-w#ref-CR46" id="ref-link-section-d105447676e1457">46</a>,<a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 47" title="Hampson, R. E. et al. Facilitation of memory encoding in primate hippocampus by a neuroprosthesis that promotes task-specific neural firing. J. Neural Eng. 10, 066013 (2013)." href="/articles/s41598-020-74399-w#ref-CR47" id="ref-link-section-d105447676e1460">47</a></sup> are known biomarkers in PET and MRI studies. SNP features picked chromosome 10, 4, 19, 1, and 5.</p><p>SNP + Imaging + EHR and SNP + EHR pick more EHR features (memory tests, metabolic markers and brain volume) which are known AD related features. EHR + Imaging pick EHR features including brain volumes, clinical dementia ratings, and metabolite markers. Imaging + SNP pick brain areas such as the hippocampus, and amygdala higher than SNP features.</p><p>In addition, we also cluster the intermediate features from EHR and SNP data using kmeans (<a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">Supplementary Information</a>) to show associations in intermediate features. On plotting the clusters for intermediate and raw features, we find that the intermediate features generate better separation as compared to the original features. This indicates subtle relationships in intermediate features, which are picked by deep models (Supplementary Figs. <a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">A5</a>, <a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">A6</a>).</p></div></div></section><section data-title="Conclusions"><div class="c-article-section" id="Sec16-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec16">Conclusions</h2><div class="c-article-section__content" id="Sec16-content"><p>Diagnosing patients with AD is challenging, and the prediction accuracy remains low for staging assessment. In this study, we report the potential of DL for multi-modal data fusion, including:</p><ul class="u-list-style-bullet"> <li> <p>Deep-models outperform shallow models for single-modality Alzheimer’s stage prediction.</p> </li> <li> <p>Novel DL framework for multi-modality data fusion outperforms single-modality DL.</p> </li> <li> <p>Novel perturbation and clustering-based feature extraction assisting DL model interpretations are capable of AD stage prediction.</p> </li> <li> <p>Application of 3D convolutional neural network architecture for MRI image data benefits the AD analysis.</p> </li> </ul><p>Despite the improved performance, our study suffers from short-comings such as limited dataset sizes. 
In the future, we will test our models on a larger and richer dataset.</p></div></div></section><section data-title="Methods"><div class="c-article-section" id="Sec17-section"><h2 class="c-article-section__title js-section-title js-c-reading-companion-sections-item" id="Sec17">Methods</h2><div class="c-article-section__content" id="Sec17-content"><p>In this study, we use DL models to perform multimodal data fusion (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig3">3</a>) of imaging, EHR, and genomic SNP data for classifying patients into CN, MCI, and AD groups. We use stacked de-noising auto-encoders for EHR and SNP, and 3D convolutional neural networks (CNNs) for MRI imaging data. After the networks are separately trained for each data modality, we apply decision trees, random forests, support vector machines, and k-nearest neighbors to conduct integrated classification of AD stage.</p><h3 class="c-article__sub-heading" id="Sec18">Data pre-processing</h3><p>As mentioned above, the ADNI dataset consists of clinical data, SNP data, and imaging data.</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec19">MRI imaging data</h4><p>We first preprocess the 3D images to filter noise, perform skull stripping, segment different types of brain tissue, and normalize and co-register the images to MNI space (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig7">7</a>a)<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 48" title="Eickhoff, S. B. et al. A new SPM toolbox for combining probabilistic cytoarchitectonic maps and functional imaging data. NeuroImage 25, 1325–1335 (2005)." href="/articles/s41598-020-74399-w#ref-CR48" id="ref-link-section-d105447676e1544">48</a></sup>. Following that, we extract 3D areas of 21 brain regions (associated with Alzheimer’s disease) including the right amygdala, left and right angular, left and right cerebellum, left and right hippocampus, left and right occipital regions, and left and right superior temporal regions (<a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">Supplementary Information</a>).</p><div class="c-article-section__figure js-c-reading-companion-figures-item" data-test="figure" data-container-section="figure" id="figure-7" data-title="Figure 7"><figure><figcaption><b id="Fig7" class="c-article-section__figure-caption" data-test="figure-caption-text">Figure 7</b></figcaption><div class="c-article-section__figure-content"><div class="c-article-section__figure-item"><a class="c-article-section__figure-link" data-test="img-link" data-track="click" data-track-label="image" data-track-action="view figure" href="/articles/s41598-020-74399-w/figures/7" rel="nofollow"><picture><img aria-describedby="Fig7" src="//media.springernature.com/lw685/springer-static/image/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_Fig7_HTML.png" alt="figure 7" loading="lazy" width="685" height="228"></picture></a></div><div class="c-article-section__figure-description" data-test="bottom-caption" id="figure-7-desc"><p>Data pre-processing pipeline for three data modalities: (<b>a</b>) Imaging data is first skull-stripped, then segmented into white matter, grey matter, and cerebrospinal fluid.
Then the images are registered to a standard space, prior to extracting 21 brain regions using anatomical automatic labeling atlases. (<b>b</b>) Clinical data is normalized to the range 1–2 or encoded as 1–2. Then we discard features with &gt; 70% missing values to obtain 1680 features for 2004 patients. (<b>c</b>) SNP data is first filtered and error-corrected; features are then selected using known genes, followed by maximum-relevance (maxrel) based methods, to obtain 500 SNPs for 808 patients.</p></div></div></figure></div><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec20">Clinical features</h4><p>We extract 1680 common clinical features (quantitative real numbers, binary, and categorical) from ADNI1, ADNI2, and ADNI GO. We normalize the quantitative data to the range 1–2, convert the categorical data into binary using one-hot encoding, and finally convert the binary data into values 1 or 2 (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig7">7</a>b).</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec21">Genetic data</h4><p>Each subject has ~3 million SNPs in the raw VCF file. We apply multiple filtering and feature selection steps (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig7">7</a>c) to eliminate SNPs with (i) low genotype quality, (ii) low minor allele frequency, (iii) high per-site missing rate, and (iv) significant Hardy–Weinberg equilibrium p-value. After filtering, we apply a two-stage feature selection: (i) we retain SNPs located on known AD-associated genes, and (ii) we select 500 SNP features using minimum redundancy maximum relevance (mRMR)<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 49" title="Peng, H., Long, F. &amp; Ding, C. Feature selection based on mutual information criteria of max-dependency, max-relevance, and min-redundancy. IEEE Trans. Pattern Anal. Mach. Intell. 27, 1226–1238 (2005)." href="/articles/s41598-020-74399-w#ref-CR49" id="ref-link-section-d105447676e1600">49</a></sup>. We chose mRMR as the feature selection method because it works well with categorical data (such as the SNP data) and has previously been reported with genetic data<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 50" title="Ding, C. &amp; Peng, H. Minimum redundancy feature selection from microarray gene expression data. J. Bioinform. Comput. Biol. 3, 185–205 (2005)." href="/articles/s41598-020-74399-w#ref-CR50" id="ref-link-section-d105447676e1604">50</a></sup>. mRMR was chosen over wrapper-based techniques such as sequential feature selection due to computational costs.
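</p><p>For intuition, a greedy mRMR-style selection can be sketched as follows (our textbook-style illustration using mutual information for both the relevance and redundancy terms; it is not the authors’ implementation, and it is slow for large feature counts):</p><pre><code class="language-python"># Greedy mRMR-style selection sketch for categorical SNP levels (1/2/3):
# at each step pick the feature maximizing relevance to the label minus
# mean redundancy with the already-selected features.
import numpy as np
from sklearn.metrics import mutual_info_score

def mrmr_select(X, y, k=500):
    relevance = np.array([mutual_info_score(X[:, j], y)
                          for j in range(X.shape[1])])
    selected = [int(np.argmax(relevance))]
    candidates = set(range(X.shape[1])) - set(selected)
    for _ in range(k - 1):
        def score(j):
            redundancy = np.mean([mutual_info_score(X[:, j], X[:, s])
                                  for s in selected])
            return relevance[j] - redundancy
        best = max(candidates, key=score)
        selected.append(best)
        candidates.remove(best)
    return selected
</code></pre><p>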
In the future, we will investigate other filter-based feature selection methods such as correlation techniques, ANOVA, and ReliefF (<a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">Supplementary Information</a>).</p><h3 class="c-article__sub-heading" id="Sec22">Intermediate feature generation using single-modalities</h3><p>We first perform feature extraction for each modality separately (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig7">7</a>), then we use DL for the generation of intermediate features. The intermediate features from EHR and SNP data are generated using auto-encoders and those of images are generated using 3D-convolutional neural networks. The intermediate features generated from each single-modality are subsequently used for multi-modal analysis. As a data-driven approach, DL’s performance heavily relies on a large amount of well-annotated training data. However, the ADNI dataset contains only a few thousand samples in total and even fewer samples with all three modalities. Thus, we use DL only for feature representation learning instead of end-to-end training.</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec23">Intermediate features for imaging data</h4><p>First, we select the regions of interest and feed each into a separate 3-dimensional convolutional neural network (Supplementary Fig. <a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">A2</a> in the supplementary material), with weights shared across the CNN modules. CNN modules can extract higher-level features from the images to form concepts that often correlate better with the targets. Each 3D CNN in the architecture above comprises ten 3D-convolutional kernels of size <span class="mathjax-tex">\(5 \times 5 \times 5\)</span> followed by pooling layers with pooling kernels of size <span class="mathjax-tex">\(3 \times 3 \times 3\)</span>. After the pooling layer, we feed the pooled 3D images into Rectified Linear Unit (ReLU) non-linearities to learn complex features from the input modalities. We use volumetric batch normalization<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 51" title="Ioffe, S. &amp; Szegedy, C. Batch normalization: Accelerating deep network training by reducing internal covariate shift. arXiv preprint arXiv:1502.03167 (2015)." href="/articles/s41598-020-74399-w#ref-CR51" id="ref-link-section-d105447676e1689">51</a></sup>, which is an effective regularizer for convolutional neural networks. Next, the feature maps generated by each 3D CNN are flattened and fed into separate fully connected layers with ReLU activation functions, followed by drop-out regularizers. We integrate the features generated from each region and feed them into the second-level fully connected layer and the corresponding drop-out layer.
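</p><p>A minimal PyTorch sketch of one per-region branch under this description (ours; the exact ordering of pooling, batch normalization, and ReLU, as well as initialization details, are our assumptions):</p><pre><code class="language-python"># One per-region 3D CNN branch: ten 5x5x5 kernels, 3x3x3 pooling, volumetric
# batch normalization, ReLU, then a 20-unit fully connected layer with
# dropout. Five such branches (with shared weights) are concatenated into the
# 100-dimensional intermediate imaging feature.
import torch
import torch.nn as nn

class RegionBranch(nn.Module):
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv3d(1, 10, kernel_size=5),   # ten 5x5x5 kernels
            nn.MaxPool3d(kernel_size=3),       # 3x3x3 pooling
            nn.BatchNorm3d(10),                # volumetric batch norm
            nn.ReLU(),
        )
        self.head = nn.Sequential(
            nn.Flatten(),
            nn.LazyLinear(20),                 # first-level FC layer (20 units)
            nn.ReLU(),
            nn.Dropout(),
        )

    def forward(self, x):                      # x: (batch, 1, 22, 23, 18)
        return self.head(self.features(x))    # (batch, 20) per region
</code></pre><p>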
Finally, we use a softmax layer with a negative-log-likelihood loss function to train the imaging network.</p><p>We use the combined features generated from the first-level fully connected layers as the intermediate features that are fed into our multi-modality DL models.</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec24">Intermediate features for EHR and SNP data using auto-encoders</h4><p>We represent each patient’s data (EHR and SNP inputs to the feature learning algorithm) as a vector of length <span class="mathjax-tex">\(m\)</span>, where <span class="mathjax-tex">\(m\)</span> is the number of features. Then, we pass this data through a two-layer stacked denoising auto-encoder network<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 52" title="Miotto, R., Li, L., Kidd, B. A. &amp; Dudley, J. T. Deep patient: An unsupervised representation to predict the future of patients from the electronic health records. Sci. Rep. 6, 26094 (2016)." href="/articles/s41598-020-74399-w#ref-CR52" id="ref-link-section-d105447676e1738">52</a></sup> (Supplementary Fig. <a data-track="click" data-track-label="link" data-track-action="supplementary material anchor" href="/articles/s41598-020-74399-w#MOESM1">A3</a> in supplementary material) to obtain a high-level representation of the patient data. Each auto-encoder layer takes an input <span class="mathjax-tex">\(x\)</span> of dimension <span class="mathjax-tex">\(n \times d\)</span>, where <span class="mathjax-tex">\(n\)</span> is the number of training samples and <span class="mathjax-tex">\(d\)</span> is the input dimensionality (<span class="mathjax-tex">\(d = m\)</span> for the first layer). The input for each layer is first passed through an encoder to convert the input into a higher-order representation of the data (<a data-track="click" data-track-label="link" data-track-action="equation anchor" href="/articles/s41598-020-74399-w#Equ1">1</a>).</p><div id="Equ1" class="c-article-equation"><div class="c-article-equation__content"><span class="mathjax-tex">$$y = f\left( {Wx + b} \right),$$</span></div><div class="c-article-equation__number"> (1) </div></div><p>where <span class="mathjax-tex">\(f\)</span> is an activation function such as the sigmoid or tanh, and <span class="mathjax-tex">\(\left[ {W,b} \right]\)</span> are parameters to be trained. We then pass the mapped values <span class="mathjax-tex">\(\left( y \right)\)</span> through a decoder to obtain a representation of the input <span class="mathjax-tex">\(x\)</span> (<a data-track="click" data-track-label="link" data-track-action="equation anchor" href="/articles/s41598-020-74399-w#Equ2">2</a>).</p><div id="Equ2" class="c-article-equation"><div class="c-article-equation__content"><span class="mathjax-tex">$$\hat{x} = f\left( {W^{T} y + b^{\prime}} \right),$$</span></div><div class="c-article-equation__number"> (2) </div></div><p>where <span class="mathjax-tex">\(b^{\prime}\)</span> needs to be trained, and the decoder weights <span class="mathjax-tex">\(W^{T}\)</span> are tied to the encoder weights. We construct the network by stacking the trained encoder layers and implement denoising using dropouts, where a portion of the input values are masked (set to zero) to allow better generalization of the models in the presence of small and noisy training data.
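</p><p>A minimal sketch of one such denoising auto-encoder layer with tied weights (ours; the activation and initialization choices are illustrative):</p><pre><code class="language-python"># Denoising auto-encoder layer with tied weights (Eqs. 1-2): dropout on the
# input plays the role of the corruption, and the decoder reuses the
# transposed encoder weight matrix.
import torch
import torch.nn as nn
import torch.nn.functional as F

class TiedDenoisingAE(nn.Module):
    def __init__(self, d_in, d_hidden, corruption=0.6):
        super().__init__()
        self.W = nn.Parameter(0.01 * torch.randn(d_hidden, d_in))
        self.b = nn.Parameter(torch.zeros(d_hidden))
        self.b_prime = nn.Parameter(torch.zeros(d_in))
        self.corrupt = nn.Dropout(corruption)

    def forward(self, x):
        y = torch.sigmoid(F.linear(self.corrupt(x), self.W, self.b))    # Eq. (1)
        x_hat = torch.sigmoid(F.linear(y, self.W.t(), self.b_prime))    # Eq. (2)
        return y, x_hat

# Training minimizes the cross-entropy of Eq. (3) between x and x_hat,
# e.g. with F.binary_cross_entropy and the Adam optimizer.
</code></pre><p>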
We perform training through back-propagation by minimizing the average cross-entropy between the input and the reconstructed input data (<a data-track="click" data-track-label="link" data-track-action="equation anchor" href="/articles/s41598-020-74399-w#Equ3">3</a>).</p><div id="Equ3" class="c-article-equation"><div class="c-article-equation__content"><span class="mathjax-tex">$$\left[ {W,b,b^{\prime}} \right] = \mathop {\arg \min }\limits_{\left[ {W,b,b^{\prime}} \right]} - \mathop \sum \limits_{k = 1}^{a} \left[ {x_{k} \log \hat{x}_{k} + \left( {1 - x_{k} } \right)\log \left( {1 - \hat{x}_{k} } \right)} \right],$$</span></div><div class="c-article-equation__number"> (3) </div></div><p>where <span class="mathjax-tex">\(a\)</span> is the number of dimensions. Optimization is carried out using Adam optimization<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 53" title="Kingma, D. &amp; Ba, J. Adam: A method for stochastic optimization. arXiv preprint arXiv:1412.6980 (2014)." href="/articles/s41598-020-74399-w#ref-CR53" id="ref-link-section-d105447676e2297">53</a></sup> with a batch size of 3.</p><p>After training the auto-encoder layers, we fine-tune each network by adding a softmax layer that predicts the final class. The intermediate features are the output of the fine-tuned network after removing the softmax layer. The hyper-parameters in the model, such as the layer sizes, dropout parameters, and regularization coefficients (to prevent overfitting), are optimized using tenfold cross-validation.</p><h4 class="c-article__sub-heading c-article__sub-heading--small" id="Sec25">Multimodal data integration</h4><p>We propose data integration across modalities as a method for bridging the gaps in our understanding of disease processes and for improving clinical outcome predictions and model performance. The data integration from different modalities can be performed at multiple levels (raw feature-level, intermediate feature-level, and decision-level)<sup><a data-track="click" data-track-action="reference anchor" data-track-label="link" data-test="citation-ref" aria-label="Reference 54" title="Ritchie, M. D., Holzinger, E. R., Li, R., Pendergrass, S. A. &amp; Kim, D. Methods of integrating data to uncover genotype–phenotype interactions. Nat. Rev. Genet. 16, 85–97 (2015)." href="/articles/s41598-020-74399-w#ref-CR54" id="ref-link-section-d105447676e2312">54</a></sup> (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig1">1</a>). In this study, we integrate the intermediate features generated in the previous step using a concatenation layer followed by a classification layer to predict the AD stage (Fig. <a data-track="click" data-track-label="link" data-track-action="figure anchor" href="/articles/s41598-020-74399-w#Fig3">3</a>). We try k-nearest neighbors (kNN), decision trees, random forests, and support vector machines (SVM) as alternatives for the classification layer. In the event any modality is missing for a specific patient, we mask the modality with zeros. This procedure prevents missing values from propagating down the layers and hence allows prediction with some missing data.
Multimodal data integration

We propose data integration across modalities as a method for bridging gaps in our understanding of disease processes and for improving clinical outcome predictions and model performance. Data from different modalities can be integrated at multiple levels: the raw feature level, the intermediate feature level, and the decision level [54] (Fig. 1). In this study, we integrate the intermediate features generated in the previous step using a concatenation layer followed by a classification layer that predicts the AD stage (Fig. 3). We evaluate k-nearest neighbors (kNN), decision trees, random forests, and support vector machines (SVM) as alternatives for the classification layer. If any modality is missing for a given patient, we mask that modality with zeros; this prevents the effect of missing values from propagating down the layers and hence allows prediction with partially missing data. We evaluate our models against baseline models that use feature-level combinations and decision-level combinations.
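The zero-masking fusion and the interchangeable classification layer can be sketched as follows (scikit-learn, with placeholder feature sizes; the actual dimensions and classifier hyper-parameters would come from the cross-validation described above).

```python
import numpy as np
from sklearn.ensemble import RandomForestClassifier

# Per-modality intermediate-feature sizes (illustrative, not the paper's values).
D_IMG, D_EHR, D_SNP = 100, 50, 50

def fuse(imaging_feats, ehr_feats, snp_feats):
    """Intermediate-feature fusion: concatenate per-modality features.
    A missing modality is zero-masked so that missing values do not
    propagate into the classification layer."""
    parts = [
        np.zeros(D_IMG) if imaging_feats is None else imaging_feats,
        np.zeros(D_EHR) if ehr_feats is None else ehr_feats,
        np.zeros(D_SNP) if snp_feats is None else snp_feats,
    ]
    return np.concatenate(parts)

# Fit any of the alternative classification layers on the fused vectors:
# X = np.stack([fuse(img_i, ehr_i, snp_i) for each patient]); y = AD-stage labels
# clf = RandomForestClassifier(n_estimators=100).fit(X, y)
# (KNeighborsClassifier, DecisionTreeClassifier, and SVC slot in identically.)
```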
References

1. Gulshan, V. et al. Development and validation of a deep learning algorithm for detection of diabetic retinopathy in retinal fundus photographs. JAMA 316, 2402–2410 (2016).
2. Ting, D. S. W. et al. Development and validation of a deep learning system for diabetic retinopathy and related eye diseases using retinal images from multiethnic populations with diabetes. JAMA 318, 2211–2223 (2017).
3. Esteva, A. et al. Dermatologist-level classification of skin cancer with deep neural networks. Nature 542, 115 (2017).
4. Weng, S., Xu, X., Li, J. & Wong, S. T. Combining deep learning and coherent anti-Stokes Raman scattering imaging for automated differential diagnosis of lung cancer. J. Biomed. Opt. 22, 106017 (2017).
5. Suk, H.-I. & Shen, D. Medical Image Computing and Computer-Assisted Intervention–MICCAI 2013 583–590 (Springer, New York, 2013).
6. Liu, S. et al. Multimodal neuroimaging feature learning for multiclass diagnosis of Alzheimer's disease. IEEE Trans. Biomed. Eng. 62, 1132–1140 (2015).
7. Suk, H. I., Lee, S. W., Shen, D. & Alzheimer's Disease Neuroimaging Initiative. Deep sparse multi-task learning for feature selection in Alzheimer's disease diagnosis. Brain Struct. Funct. 221(5), 2569–2587 (2016).
8. Schulam, P., Wigley, F. & Saria, S. In AAAI, 2956–2964 (2015).
9. Suk, H.-I. & Shen, D. In International Conference on Medical Image Computing and Computer-Assisted Intervention, 583–590 (Springer, New York, 2013).
10. Choi, E., Bahadori, M. T. & Sun, J. Doctor AI: Predicting clinical events via recurrent neural networks. arXiv preprint arXiv:1511.05942 (2015).
11. Zhou, J. & Troyanskaya, O. G. Predicting effects of noncoding variants with deep learning-based sequence model. Nat. Methods 12, 931–934 (2015).
12. Ngiam, J., Khosla, A., Kim, M., Nam, J., Lee, H. & Ng, A. Y. Multimodal deep learning. In Proceedings of the 28th International Conference on Machine Learning (ICML-11) 689–696 (2011).
13. Alzheimer's Association. 2016 Alzheimer's disease facts and figures. Alzheimer's Dement. 12(4), 459–509 (2016).
14. Alzheimer's Association. 2013 Alzheimer's disease facts and figures. Alzheimer's Dement. 9(2), 208–245 (2013).
15. Patterson, C. World Alzheimer Report 2018—The State of the Art of Dementia Research: New Frontiers (Alzheimer's Disease International (ADI), London, 2018).
16. Perrin, R. J., Fagan, A. M. & Holtzman, D. M. Multimodal techniques for diagnosis and prognosis of Alzheimer's disease. Nature 461, 916–922 (2009).
17. Blennow, K. et al. Clinical utility of cerebrospinal fluid biomarkers in the diagnosis of early Alzheimer's disease. Alzheimer's Dement. 11, 58–69 (2015).
18. Eskildsen, S. F. et al. Structural imaging biomarkers of Alzheimer's disease: Predicting disease progression. Neurobiol. Aging 36, S23–S31 (2015).
19. Grimmer, T. et al. Visual versus fully automated analyses of 18F-FDG and amyloid PET for prediction of dementia due to Alzheimer disease in mild cognitive impairment. J. Nucl. Med. 57, 204–207 (2016).
20. Cui, R., Liu, M. & Alzheimer's Disease Neuroimaging Initiative. RNN-based longitudinal analysis for diagnosis of Alzheimer's disease. Comput. Med. Imaging Graph. 73, 1–10 (2019).
21. Barnes, J. et al. Vascular and Alzheimer's disease markers independently predict brain atrophy rate in Alzheimer's Disease Neuroimaging Initiative controls. Neurobiol. Aging 34, 1996–2002 (2013).
22. Doecke, J. D. et al. Blood-based protein biomarkers for diagnosis of Alzheimer disease. Arch. Neurol. 69, 1318–1325 (2012).
23. Lee, G., Nho, K., Kang, B., Sohn, K.-A. & Kim, D. Predicting Alzheimer's disease progression using multi-modal deep learning approach. Sci. Rep. 9, 1952 (2019).
24. Zhao, J. et al. Learning from longitudinal data in electronic health record and genetic data to improve cardiovascular event prediction. Sci. Rep. 9, 717 (2019).
25. Wu, W., Venugopalan, J. & Wang, M. D. 11C-PIB PET image analysis for Alzheimer's diagnosis using weighted voting ensembles. In 2017 39th Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC) 3914–3917 (IEEE, 2017).
26. Phan, J. H., Quo, C. F. & Wang, M. D. Functional genomics and proteomics in the clinical neurosciences: data mining and bioinformatics. Prog. Brain Res. 158, 83–108 (2006).
27. Dyrba, M., Grothe, M., Kirste, T. & Teipel, S. J. Multimodal analysis of functional and structural disconnection in Alzheimer's disease using multiple kernel SVM. Hum. Brain Mapp. 36, 2118–2131 (2015).
28. Shaffer, J. L. et al. Predicting cognitive decline in subjects at risk for Alzheimer disease by using combined cerebrospinal fluid, MR imaging, and PET biomarkers. Radiology 266, 583–591 (2013).
29. Dai, Z. et al. Discriminative analysis of early Alzheimer's disease using multi-modal imaging and multi-level characterization with multi-classifier (M3). NeuroImage 59, 2187–2195 (2012).
30. Dyrba, M. et al. Predicting prodromal Alzheimer's disease in subjects with mild cognitive impairment using machine learning classification of multimodal multicenter diffusion-tensor and magnetic resonance imaging data. J. Neuroimaging 25, 738–747 (2015).
31. Lorenzi, M. et al. Multimodal image analysis in Alzheimer's disease via statistical modelling of non-local intensity correlations. Sci. Rep. 6, 22161 (2016).
32. Vogel, J. W. et al. Brain properties predict proximity to symptom onset in sporadic Alzheimer's disease. Brain 141, 1871–1883 (2018).
33. Gray, K. R., Aljabar, P., Heckemann, R. A., Hammers, A. & Rueckert, D. Random forest-based similarity measures for multi-modal classification of Alzheimer's disease. NeuroImage 65, 167–175 (2013).
34. Zhang, D., Wang, Y., Zhou, L., Yuan, H. & Shen, D. Multimodal classification of Alzheimer's disease and mild cognitive impairment. NeuroImage 55, 856–867 (2011).
35. Wang, H. et al. Identifying disease sensitive and quantitative trait-relevant biomarkers from multidimensional heterogeneous imaging genetics data via sparse multimodal multitask learning. Bioinformatics 28, i127–i136 (2012).
36. Suk, H.-I., Lee, S.-W. & Shen, D. Hierarchical feature representation and multimodal fusion with deep learning for AD/MCI diagnosis. NeuroImage 101, 569–582 (2014).
37. Mueller, S. G. et al. Ways toward an early diagnosis in Alzheimer's disease: the Alzheimer's Disease Neuroimaging Initiative (ADNI). Alzheimer's Dement. 1, 55–66 (2005).
38. Shen, L. et al. Genetic analysis of quantitative phenotypes in AD and MCI: Imaging, cognition and biomarkers.
<i>Brain Imaging Behav.</i> <b>8</b>, 183–207 (2014).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="noopener" data-track-label="10.1007/s11682-013-9262-z" data-track-item_id="10.1007/s11682-013-9262-z" data-track-value="article reference" data-track-action="article reference" href="https://link.springer.com/doi/10.1007/s11682-013-9262-z" aria-label="Article reference 38" data-doi="10.1007/s11682-013-9262-z">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="cas reference" data-track-action="cas reference" href="/articles/cas-redirect/1:CAS:528:DC%2BC2cXpt1KltLw%3D" aria-label="CAS reference 38">CAS</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=24092460" aria-label="PubMed reference 38">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 38" href="http://scholar.google.com/scholar_lookup?&amp;title=Genetic%20analysis%20of%20quantitative%20phenotypes%20in%20AD%20and%20MCI%3A%20Imaging%2C%20cognition%20and%20biomarkers&amp;journal=Brain%20Imaging%20Behav.&amp;doi=10.1007%2Fs11682-013-9262-z&amp;volume=8&amp;pages=183-207&amp;publication_year=2014&amp;author=Shen%2CL"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="39."><p class="c-article-references__text" id="ref-CR39">Leandrou, S., Petroudi, S., Reyes-Aldasoro, C. C., Kyriacou, P. A. &amp; Pattichis, C. S. Quantitative MRI brain studies in mild cognitive impairment and Alzheimer’s disease: A methodological review. <i>IEEE Rev. Biomed. 
Eng.</i> <b>11</b>, 97–111 (2018).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1109/RBME.2018.2796598" data-track-item_id="10.1109/RBME.2018.2796598" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1109%2FRBME.2018.2796598" aria-label="Article reference 39" data-doi="10.1109/RBME.2018.2796598">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=29994606" aria-label="PubMed reference 39">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 39" href="http://scholar.google.com/scholar_lookup?&amp;title=Quantitative%20MRI%20brain%20studies%20in%20mild%20cognitive%20impairment%20and%20Alzheimer%27s%20disease%3A%20A%20methodological%20review&amp;journal=IEEE%20Rev.%20Biomed.%20Eng.&amp;doi=10.1109%2FRBME.2018.2796598&amp;volume=11&amp;pages=97-111&amp;publication_year=2018&amp;author=Leandrou%2CS&amp;author=Petroudi%2CS&amp;author=Reyes-Aldasoro%2CCC&amp;author=Kyriacou%2CPA&amp;author=Pattichis%2CCS"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="40."><p class="c-article-references__text" id="ref-CR40">Mhaskar, H., Liao, Q. &amp; Poggio, T. Learning functions: when is deep better than shallow. arXiv preprint arXiv:1603.00988 (2016).</p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="41."><p class="c-article-references__text" id="ref-CR41">Shin, H.-C. <i>et al.</i> Deep convolutional neural networks for computer-aided detection: CNN architectures, dataset characteristics and transfer learning. <i>IEEE Trans. Med. 
Imaging</i> <b>35</b>(5), 1285–1298 (2016).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1109/TMI.2016.2528162" data-track-item_id="10.1109/TMI.2016.2528162" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1109%2FTMI.2016.2528162" aria-label="Article reference 41" data-doi="10.1109/TMI.2016.2528162">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=26886976" aria-label="PubMed reference 41">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 41" href="http://scholar.google.com/scholar_lookup?&amp;title=Deep%20convolutional%20neural%20networks%20for%20computer-aided%20detection%3A%20CNN%20architectures%2C%20dataset%20characteristics%20and%20transfer%20learning&amp;journal=IEEE%20Trans.%20Med.%20Imaging&amp;doi=10.1109%2FTMI.2016.2528162&amp;volume=35&amp;issue=5&amp;pages=1285-1298&amp;publication_year=2016&amp;author=Shin%2CH-C"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="42."><p class="c-article-references__text" id="ref-CR42">Tong, L., Mitchel, J., Chatlin, K. &amp; Wang, M. D. Deep learning based feature-level integration of multi-omics data for breast cancer patients survival analysis. <i>BMC Med. Inform. Decis. Mak.</i> <b>20</b>, 1–12 (2020).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="noopener" data-track-label="10.1186/s12911-020-01225-8" data-track-item_id="10.1186/s12911-020-01225-8" data-track-value="article reference" data-track-action="article reference" href="https://link.springer.com/doi/10.1186/s12911-020-01225-8" aria-label="Article reference 42" data-doi="10.1186/s12911-020-01225-8">Article</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 42" href="http://scholar.google.com/scholar_lookup?&amp;title=Deep%20learning%20based%20feature-level%20integration%20of%20multi-omics%20data%20for%20breast%20cancer%20patients%20survival%20analysis&amp;journal=BMC%20Med.%20Inform.%20Decis.%20Mak.&amp;doi=10.1186%2Fs12911-020-01225-8&amp;volume=20&amp;pages=1-12&amp;publication_year=2020&amp;author=Tong%2CL&amp;author=Mitchel%2CJ&amp;author=Chatlin%2CK&amp;author=Wang%2CMD"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="43."><p class="c-article-references__text" id="ref-CR43">Tong, L., Wu, H. &amp; Wang, M. D. Integrating multi-omics data by learning modality invariant representations for improved prediction of overall survival of cancer. <i>Methods</i>. 
<a href="https://doi.org/10.1016/j.ymeth.2020.07.008" data-track="click_references" data-track-action="external reference" data-track-value="external reference" data-track-label="10.1016/j.ymeth.2020.07.008">https://doi.org/10.1016/j.ymeth.2020.07.008</a> (2020).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1016/j.ymeth.2020.07.008" data-track-item_id="10.1016/j.ymeth.2020.07.008" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1016%2Fj.ymeth.2020.07.008" aria-label="Article reference 43" data-doi="10.1016/j.ymeth.2020.07.008">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=32763377" aria-label="PubMed reference 43">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 43" href="http://scholar.google.com/scholar_lookup?&amp;title=Integrating%20multi-omics%20data%20by%20learning%20modality%20invariant%20representations%20for%20improved%20prediction%20of%20overall%20survival%20of%20cancer&amp;journal=Methods&amp;doi=10.1016%2Fj.ymeth.2020.07.008&amp;publication_year=2020&amp;author=Tong%2CL&amp;author=Wu%2CH&amp;author=Wang%2CMD"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="44."><p class="c-article-references__text" id="ref-CR44">Che, Z., Purushotham, S., Khemani, R. &amp; Liu, Y. Distilling knowledge from deep networks with applications to healthcare domain. arXiv preprint arXiv:1512.03542 (2015).</p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="45."><p class="c-article-references__text" id="ref-CR45">Glodzik, L. <i>et al.</i> Alzheimer’s disease markers, hypertension, and gray matter damage in normal elderly. <i>Neurobiol. 
Aging</i> <b>33</b>, 1215–1227 (2012).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1016/j.neurobiolaging.2011.02.012" data-track-item_id="10.1016/j.neurobiolaging.2011.02.012" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1016%2Fj.neurobiolaging.2011.02.012" aria-label="Article reference 45" data-doi="10.1016/j.neurobiolaging.2011.02.012">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="cas reference" data-track-action="cas reference" href="/articles/cas-redirect/1:CAS:528:DC%2BC38Xnt1Kntbc%3D" aria-label="CAS reference 45">CAS</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=21530003" aria-label="PubMed reference 45">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 45" href="http://scholar.google.com/scholar_lookup?&amp;title=Alzheimer%27s%20disease%20markers%2C%20hypertension%2C%20and%20gray%20matter%20damage%20in%20normal%20elderly&amp;journal=Neurobiol.%20Aging&amp;doi=10.1016%2Fj.neurobiolaging.2011.02.012&amp;volume=33&amp;pages=1215-1227&amp;publication_year=2012&amp;author=Glodzik%2CL"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="46."><p class="c-article-references__text" id="ref-CR46">Dubois, B. <i>et al.</i> Advancing research diagnostic criteria for Alzheimer’s disease: the IWG-2 criteria. <i>Lancet Neurol.</i> <b>13</b>, 614–629 (2014).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1016/S1474-4422(14)70090-0" data-track-item_id="10.1016/S1474-4422(14)70090-0" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1016%2FS1474-4422%2814%2970090-0" aria-label="Article reference 46" data-doi="10.1016/S1474-4422(14)70090-0">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=24849862" aria-label="PubMed reference 46">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 46" href="http://scholar.google.com/scholar_lookup?&amp;title=Advancing%20research%20diagnostic%20criteria%20for%20Alzheimer%27s%20disease%3A%20the%20IWG-2%20criteria&amp;journal=Lancet%20Neurol.&amp;doi=10.1016%2FS1474-4422%2814%2970090-0&amp;volume=13&amp;pages=614-629&amp;publication_year=2014&amp;author=Dubois%2CB"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="47."><p class="c-article-references__text" id="ref-CR47">Hampson, R. E. 
<i>et al.</i> Facilitation of memory encoding in primate hippocampus by a neuroprosthesis that promotes task-specific neural firing. <i>J. Neural Eng.</i> <b>10</b>, 066013 (2013).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1088/1741-2560/10/6/066013" data-track-item_id="10.1088/1741-2560/10/6/066013" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1088%2F1741-2560%2F10%2F6%2F066013" aria-label="Article reference 47" data-doi="10.1088/1741-2560/10/6/066013">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="ads reference" data-track-action="ads reference" href="http://adsabs.harvard.edu/cgi-bin/nph-data_query?link_type=ABSTRACT&amp;bibcode=2013JNEng..10f6013H" aria-label="ADS reference 47">ADS</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=24216292" aria-label="PubMed reference 47">PubMed</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed central reference" data-track-action="pubmed central reference" href="http://www.ncbi.nlm.nih.gov/pmc/articles/PMC3919468" aria-label="PubMed Central reference 47">PubMed Central</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 47" href="http://scholar.google.com/scholar_lookup?&amp;title=Facilitation%20of%20memory%20encoding%20in%20primate%20hippocampus%20by%20a%20neuroprosthesis%20that%20promotes%20task-specific%20neural%20firing&amp;journal=J.%20Neural%20Eng.&amp;doi=10.1088%2F1741-2560%2F10%2F6%2F066013&amp;volume=10&amp;publication_year=2013&amp;author=Hampson%2CRE"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="48."><p class="c-article-references__text" id="ref-CR48">Eickhoff, S. B. <i>et al.</i> A new SPM toolbox for combining probabilistic cytoarchitectonic maps and functional imaging data. 
<i>NeuroImage</i> <b>25</b>, 1325–1335 (2005).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1016/j.neuroimage.2004.12.034" data-track-item_id="10.1016/j.neuroimage.2004.12.034" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1016%2Fj.neuroimage.2004.12.034" aria-label="Article reference 48" data-doi="10.1016/j.neuroimage.2004.12.034">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=15850749" aria-label="PubMed reference 48">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 48" href="http://scholar.google.com/scholar_lookup?&amp;title=A%20new%20SPM%20toolbox%20for%20combining%20probabilistic%20cytoarchitectonic%20maps%20and%20functional%20imaging%20data&amp;journal=NeuroImage&amp;doi=10.1016%2Fj.neuroimage.2004.12.034&amp;volume=25&amp;pages=1325-1335&amp;publication_year=2005&amp;author=Eickhoff%2CSB"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="49."><p class="c-article-references__text" id="ref-CR49">Peng, H., Long, F. &amp; Ding, C. Feature selection based on mutual information criteria of max-dependency, max-relevance, and min-redundancy. <i>IEEE Trans. Pattern Anal. Mach. Intell.</i> <b>27</b>, 1226–1238 (2005).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1109/TPAMI.2005.159" data-track-item_id="10.1109/TPAMI.2005.159" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1109%2FTPAMI.2005.159" aria-label="Article reference 49" data-doi="10.1109/TPAMI.2005.159">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=16119262" aria-label="PubMed reference 49">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 49" href="http://scholar.google.com/scholar_lookup?&amp;title=Feature%20selection%20based%20on%20mutual%20information%20criteria%20of%20max-dependency%2C%20max-relevance%2C%20and%20min-redundancy&amp;journal=IEEE%20Trans.%20Pattern%20Anal.%20Mach.%20Intell.&amp;doi=10.1109%2FTPAMI.2005.159&amp;volume=27&amp;pages=1226-1238&amp;publication_year=2005&amp;author=Peng%2CH&amp;author=Long%2CF&amp;author=Ding%2CC"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="50."><p class="c-article-references__text" id="ref-CR50">Ding, C. &amp; Peng, H. Minimum redundancy feature selection from microarray gene expression data. <i>J. Bioinform. Comput. 
Biol.</i> <b>3</b>, 185–205 (2005).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1142/S0219720005001004" data-track-item_id="10.1142/S0219720005001004" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1142%2FS0219720005001004" aria-label="Article reference 50" data-doi="10.1142/S0219720005001004">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="cas reference" data-track-action="cas reference" href="/articles/cas-redirect/1:CAS:528:DC%2BD2MXpsVersr0%3D" aria-label="CAS reference 50">CAS</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=15852500" aria-label="PubMed reference 50">PubMed</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 50" href="http://scholar.google.com/scholar_lookup?&amp;title=Minimum%20redundancy%20feature%20selection%20from%20microarray%20gene%20expression%20data&amp;journal=J.%20Bioinform.%20Comput.%20Biol.&amp;doi=10.1142%2FS0219720005001004&amp;volume=3&amp;pages=185-205&amp;publication_year=2005&amp;author=Ding%2CC&amp;author=Peng%2CH"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="51."><p class="c-article-references__text" id="ref-CR51">Ioffe, S. &amp; Szegedy, C. Batch normalization: Accelerating deep network training by reducing internal covariate shift. arXiv preprint arXiv:1502.03167 (2015).</p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="52."><p class="c-article-references__text" id="ref-CR52">Miotto, R., Li, L., Kidd, B. A. &amp; Dudley, J. T. Deep patient: An unsupervised representation to predict the future of patients from the electronic health records. <i>Sci. 
Rep.</i> <b>6</b>, 26094 (2016).</p><p class="c-article-references__links u-hide-print"><a data-track="click_references" rel="nofollow noopener" data-track-label="10.1038/srep26094" data-track-item_id="10.1038/srep26094" data-track-value="article reference" data-track-action="article reference" href="https://doi.org/10.1038%2Fsrep26094" aria-label="Article reference 52" data-doi="10.1038/srep26094">Article</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="ads reference" data-track-action="ads reference" href="http://adsabs.harvard.edu/cgi-bin/nph-data_query?link_type=ABSTRACT&amp;bibcode=2016NatSR...626094M" aria-label="ADS reference 52">ADS</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="cas reference" data-track-action="cas reference" href="/articles/cas-redirect/1:CAS:528:DC%2BC28Xot1Gnu7s%3D" aria-label="CAS reference 52">CAS</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed reference" data-track-action="pubmed reference" href="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?cmd=Retrieve&amp;db=PubMed&amp;dopt=Abstract&amp;list_uids=27185194" aria-label="PubMed reference 52">PubMed</a>  <a data-track="click_references" rel="nofollow noopener" data-track-label="link" data-track-item_id="link" data-track-value="pubmed central reference" data-track-action="pubmed central reference" href="http://www.ncbi.nlm.nih.gov/pmc/articles/PMC4869115" aria-label="PubMed Central reference 52">PubMed Central</a>  <a data-track="click_references" data-track-action="google scholar reference" data-track-value="google scholar reference" data-track-label="link" data-track-item_id="link" rel="nofollow noopener" aria-label="Google Scholar reference 52" href="http://scholar.google.com/scholar_lookup?&amp;title=Deep%20patient%3A%20An%20unsupervised%20representation%20to%20predict%20the%20future%20of%20patients%20from%20the%20electronic%20health%20records&amp;journal=Sci.%20Rep.&amp;doi=10.1038%2Fsrep26094&amp;volume=6&amp;publication_year=2016&amp;author=Miotto%2CR&amp;author=Li%2CL&amp;author=Kidd%2CBA&amp;author=Dudley%2CJT"> Google Scholar</a>  </p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="53."><p class="c-article-references__text" id="ref-CR53">Kingma, D. &amp; Ba, J. Adam: A method for stochastic optimization. arXiv preprint arXiv:1412.6980 (2014).</p></li><li class="c-article-references__item js-c-reading-companion-references-item" data-counter="54."><p class="c-article-references__text" id="ref-CR54">Ritchie, M. D., Holzinger, E. R., Li, R., Pendergrass, S. A. &amp; Kim, D. Methods of integrating data to uncover genotype–phenotype interactions. <i>Nat. Rev. 
Acknowledgements

Data used in preparation of this article were obtained from the Alzheimer’s Disease Neuroimaging Initiative (ADNI) database (adni.loni.usc.edu). As such, the investigators within the ADNI contributed to the design and implementation of ADNI and/or provided data but did not participate in analysis or writing of this report. A complete listing of ADNI investigators can be found at: https://adni.loni.usc.edu/wp-content/uploads/how_to_apply/ADNI_Acknowledgement_List.pdf.

Funding

The work was supported in part by the Petit Institute Faculty Fellow Fund, the Carol Ann and David D. Flanagan Faculty Fellow Research Fund, and an Amazon Faculty Research Fellowship. This work was also supported in part by a scholarship from the China Scholarship Council (CSC) under Grant No. 201406010343. The content of this article is solely the responsibility of the authors.

Author information

Authors and Affiliations

Department of Biomedical Engineering, Georgia Institute of Technology and Emory University, Atlanta, GA, USA
Janani Venugopalan, Li Tong & May D. Wang

School of Computational Science and Engineering, Georgia Institute of Technology, Atlanta, GA, USA
Hamid Reza Hassanzadeh

School of Electrical and Computer Engineering, Georgia Institute of Technology, Atlanta, GA, USA
May D. Wang

Winship Cancer Institute, Parker H. Petit Institute for Bioengineering and Biosciences, Institute of People and Technology, Georgia Institute of Technology and Emory University, Atlanta, GA, USA
May D. Wang
Contributions

J.V. contributed to the study design, the pre-processing and data analysis of the EHR data, the combination of the three data modalities, and the writing of the manuscript, including Figs. 1, 2 and 3 and the tables. L.T. contributed to the pre-processing and analysis of the SNP data, the writing of the manuscript (including the background and discussion, the sections on SNP results and pre-processing, Fig. 4, and the relevant figures in the supplementary section), and the revision of the manuscript. H.H. contributed to the image-processing pipeline and the writing of the results pertaining to image processing, including the relevant figures in the supplementary section. Prof. M.D.W. contributed to the study design, result evaluation, and the extensive refining and revision of the manuscript.

Corresponding author

Correspondence to May D. Wang (maywang@gatech.edu).

Ethics declarations

Competing interests

The authors declare no competing interests.

Additional information

Publisher’s note

Springer Nature remains neutral with regard to jurisdictional claims in published maps and institutional affiliations.

Supplementary information

Supplementary Information (DOCX): https://static-content.springer.com/esm/art%3A10.1038%2Fs41598-020-74399-w/MediaObjects/41598_2020_74399_MOESM1_ESM.docx

Rights and permissions

Open Access This article is licensed under a Creative Commons Attribution 4.0 International License, which permits use, sharing, adaptation, distribution and reproduction in any medium or format, as long as you give appropriate credit to the original author(s) and the source, provide a link to the Creative Commons licence, and indicate if changes were made. The images or other third party material in this article are included in the article’s Creative Commons licence, unless indicated otherwise in a credit line to the material. If material is not included in the article’s Creative Commons licence and your intended use is not permitted by statutory regulation or exceeds the permitted use, you will need to obtain permission directly from the copyright holder.
To view a copy of this licence, visit http://creativecommons.org/licenses/by/4.0/.

About this article

Cite this article

Venugopalan, J., Tong, L., Hassanzadeh, H.R. et al. Multimodal deep learning models for early detection of Alzheimer’s disease stage. Sci Rep 11, 3254 (2021). https://doi.org/10.1038/s41598-020-74399-w

Received: 28 August 2018
Accepted: 22 January 2020
Published: 05 February 2021
DOI: https://doi.org/10.1038/s41598-020-74399-w
This article is cited by

MR–CT image fusion method of intracranial tumors based on Res2Net
Wei Chen, Qixuan Li, Xinye Ni
BMC Medical Imaging (2024). https://doi.org/10.1186/s12880-024-01329-x

Understanding machine learning applications in dementia research and clinical practice: a review for biomedical scientists and clinicians
Yihan Wang, Shu Liu, Liang Jin
Alzheimer’s Research & Therapy (2024). https://doi.org/10.1186/s13195-024-01540-6

Deep learning based joint fusion approach to exploit anatomical and functional brain information in autism spectrum disorders
Sara Saponaro, Francesca Lizzi, Alessandra Retico
Brain Informatics (2024). https://doi.org/10.1186/s40708-023-00217-4

A modified deep learning method for Alzheimer’s disease detection based on the facial submicroscopic features in mice
Guosheng Shen, Fei Ye, Qiang Li
BioMedical Engineering OnLine (2024). https://doi.org/10.1186/s12938-024-01305-0

Multimodal masked siamese network improves chest X-ray representation learning
Saeed Shurrab, Alejandro Guerra-Manzanares, Farah E. Shamout
Scientific Reports (2024). https://doi.org/10.1038/s41598-024-74043-x

Associated content

Collection: Top 100 in Neuroscience (https://www.nature.com/collections/ehhfgeijba)
src="//pubads.g.doubleclick.net/gampad/ad?iu=/285/scientific_reports/article&amp;sz=300x250&amp;c=-1339767963&amp;t=pos%3Dright%26type%3Darticle%26artid%3Ds41598-020-74399-w%26doi%3D10.1038/s41598-020-74399-w%26subjmeta%3D114,2164,2401,631%26kwrd%3DData+integration,Data+mining" alt="Advertisement" width="300" height="250"></a> </noscript> </div> </div> </div> </div> </div> <div class="c-reading-companion__panel c-reading-companion__figures c-reading-companion__panel--full-width" id="tabpanel-figures"></div> <div class="c-reading-companion__panel c-reading-companion__references c-reading-companion__panel--full-width" id="tabpanel-references"></div> </div> </div> </aside> </div> <nav class="c-header__dropdown" aria-labelledby="Explore-content" data-test="Explore-content" id="explore" data-track-component="nature-150-split-header"> <div class="c-header__container"> <h2 id="Explore-content" class="c-header__heading c-header__heading--js-hide">Explore content</h2> <ul class="c-header__list c-header__list--js-stack"> <li class="c-header__item"> <a class="c-header__link" href="/srep/research-articles" data-track="click" data-track-action="research articles" data-track-label="link" data-test="explore-nav-item"> Research articles </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/news-and-comment" data-track="click" data-track-action="news &amp; comment" data-track-label="link" data-test="explore-nav-item"> News &amp; Comment </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/collections" data-track="click" data-track-action="collections" data-track-label="link" data-test="explore-nav-item"> Collections </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/browse-subjects" data-track="click" data-track-action="subjects" data-track-label="link" data-test="explore-nav-item"> Subjects </a> </li> </ul> <ul class="c-header__list c-header__list--js-stack"> <li class="c-header__item"> <a class="c-header__link" href="https://www.facebook.com/scientificreports" data-track="click" data-track-action="facebook" data-track-label="link">Follow us on Facebook </a> </li> <li class="c-header__item"> <a class="c-header__link" href="https://twitter.com/SciReports" data-track="click" data-track-action="twitter" data-track-label="link">Follow us on Twitter </a> </li> <li class="c-header__item c-header__item--hide-lg"> <a class="c-header__link" href="https://www.nature.com/my-account/alerts/subscribe-journal?list-id&#x3D;288" rel="nofollow" data-track="click" data-track-action="Sign up for alerts" data-track-external data-track-label="link (mobile dropdown)">Sign up for alerts<svg role="img" aria-hidden="true" focusable="false" height="18" viewBox="0 0 18 18" width="18" xmlns="http://www.w3.org/2000/svg"><path d="m4 10h2.5c.27614237 0 .5.2238576.5.5s-.22385763.5-.5.5h-3.08578644l-1.12132034 1.1213203c-.18753638.1875364-.29289322.4418903-.29289322.7071068v.1715729h14v-.1715729c0-.2652165-.1053568-.5195704-.2928932-.7071068l-1.7071068-1.7071067v-3.4142136c0-2.76142375-2.2385763-5-5-5-2.76142375 0-5 2.23857625-5 5zm3 4c0 1.1045695.8954305 2 2 2s2-.8954305 2-2zm-5 0c-.55228475 0-1-.4477153-1-1v-.1715729c0-.530433.21071368-1.0391408.58578644-1.4142135l1.41421356-1.4142136v-3c0-3.3137085 2.6862915-6 6-6s6 2.6862915 6 6v3l1.4142136 1.4142136c.3750727.3750727.5857864.8837805.5857864 1.4142135v.1715729c0 .5522847-.4477153 1-1 1h-4c0 1.6568542-1.3431458 3-3 3-1.65685425 0-3-1.3431458-3-3z" fill="#fff"/></svg> </a> </li> <li class="c-header__item 
c-header__item--hide-lg"> <a class="c-header__link" href="https://www.nature.com/srep.rss" data-track="click" data-track-action="rss feed" data-track-label="link"> <span>RSS feed</span> </a> </li> </ul> </div> </nav> <nav class="c-header__dropdown" aria-labelledby="About-the-journal" id="about-the-journal" data-test="about-the-journal" data-track-component="nature-150-split-header"> <div class="c-header__container"> <h2 id="About-the-journal" class="c-header__heading c-header__heading--js-hide">About the journal</h2> <ul class="c-header__list c-header__list--js-stack"> <li class="c-header__item"> <a class="c-header__link" href="/srep/about" data-track="click" data-track-action="about scientific reports" data-track-label="link"> About Scientific Reports </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/contact" data-track="click" data-track-action="contact" data-track-label="link"> Contact </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/journal-policies" data-track="click" data-track-action="journal policies" data-track-label="link"> Journal policies </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/guide-to-referees" data-track="click" data-track-action="guide to referees" data-track-label="link"> Guide to referees </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/calls-for-papers" data-track="click" data-track-action="calls for papers" data-track-label="link"> Calls for Papers </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/editorschoice" data-track="click" data-track-action="editor&#x27;s choice" data-track-label="link"> Editor&#x27;s Choice </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/highlights" data-track="click" data-track-action="journal highlights" data-track-label="link"> Journal highlights </a> </li> <li class="c-header__item"> <a class="c-header__link" href="/srep/open-access" data-track="click" data-track-action="open access fees and funding" data-track-label="link"> Open Access Fees and Funding </a> </li> </ul> </div> </nav> <nav class="c-header__dropdown" aria-labelledby="Publish-with-us-label" id="publish-with-us" data-test="publish-with-us" data-track-component="nature-150-split-header"> <div class="c-header__container"> <h2 id="Publish-with-us-label" class="c-header__heading c-header__heading--js-hide">Publish with us</h2> <ul class="c-header__list c-header__list--js-stack"> <li class="c-header__item"> <a class="c-header__link" href="/srep/author-instructions" data-track="click" data-track-action="for authors" data-track-label="link"> For authors </a> </li> <li class="c-header__item"> <a class="c-header__link" data-test="nature-author-services" data-track="nav_language_services" data-track-context="header publish with us dropdown menu" data-track-action="manuscript author services" data-track-label="link manuscript author services" href="https://authorservices.springernature.com/go/sn/?utm_source=For+Authors&utm_medium=Website_Nature&utm_campaign=Platform+Experimentation+2022&utm_id=PE2022"> Language editing services </a> </li> <li class="c-header__item c-header__item--keyline"> <a class="c-header__link" href="https://author-welcome.nature.com/41598" data-track="click_submit_manuscript" data-track-context="submit link in Nature header dropdown menu" data-track-action="submit manuscript" data-track-label="link (publish with us dropdown menu)" data-track-external>Submit manuscript<svg role="img" 
aria-hidden="true" focusable="false" height="18" viewBox="0 0 18 18" width="18" xmlns="http://www.w3.org/2000/svg"><path d="m15 0c1.1045695 0 2 .8954305 2 2v5.5c0 .27614237-.2238576.5-.5.5s-.5-.22385763-.5-.5v-5.5c0-.51283584-.3860402-.93550716-.8833789-.99327227l-.1166211-.00672773h-9v3c0 1.1045695-.8954305 2-2 2h-3v10c0 .5128358.38604019.9355072.88337887.9932723l.11662113.0067277h7.5c.27614237 0 .5.2238576.5.5s-.22385763.5-.5.5h-7.5c-1.1045695 0-2-.8954305-2-2v-10.17157288c0-.53043297.21071368-1.0391408.58578644-1.41421356l3.82842712-3.82842712c.37507276-.37507276.88378059-.58578644 1.41421356-.58578644zm-.5442863 8.18867991 3.3545404 3.35454039c.2508994.2508994.2538696.6596433.0035959.909917-.2429543.2429542-.6561449.2462671-.9065387-.0089489l-2.2609825-2.3045251.0010427 7.2231989c0 .3569916-.2898381.6371378-.6473715.6371378-.3470771 0-.6473715-.2852563-.6473715-.6371378l-.0010428-7.2231995-2.2611222 2.3046654c-.2531661.2580415-.6562868.2592444-.9065605.0089707-.24295423-.2429542-.24865597-.6576651.0036132-.9099343l3.3546673-3.35466731c.2509089-.25090888.6612706-.25227691.9135302-.00001728zm-.9557137-3.18867991c.2761424 0 .5.22385763.5.5s-.2238576.5-.5.5h-6c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5zm-8.5-3.587-3.587 3.587h2.587c.55228475 0 1-.44771525 1-1zm8.5 1.587c.2761424 0 .5.22385763.5.5s-.2238576.5-.5.5h-6c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5z" fill="#fff"/></svg> </a> </li> </ul> </div> </nav> <div id="search-menu" class="c-header__dropdown c-header__dropdown--full-width" data-track-component="nature-150-split-header"> <div class="c-header__container"> <h2 class="c-header__visually-hidden">Search</h2> <form class="c-header__search-form" action="/search" method="get" role="search" autocomplete="off" data-test="inline-search"> <label class="c-header__heading" for="keywords">Search articles by subject, keyword or author</label> <div class="c-header__search-layout c-header__search-layout--max-width"> <div> <input type="text" required="" class="c-header__input" id="keywords" name="q" value=""> </div> <div class="c-header__search-layout"> <div> <label for="results-from" class="c-header__visually-hidden">Show results from</label> <select id="results-from" name="journal" class="c-header__select"> <option value="" selected>All journals</option> <option value="srep">This journal</option> </select> </div> <div> <button type="submit" class="c-header__search-button">Search</button> </div> </div> </div> </form> <div class="c-header__flush"> <a class="c-header__link" href="/search/advanced" data-track="click" data-track-action="advanced search" data-track-label="link"> Advanced search </a> </div> <h3 class="c-header__heading c-header__heading--keyline">Quick links</h3> <ul class="c-header__list"> <li><a class="c-header__link" href="/subjects" data-track="click" data-track-action="explore articles by subject" data-track-label="link">Explore articles by subject</a></li> <li><a class="c-header__link" href="/naturecareers" data-track="click" data-track-action="find a job" data-track-label="link">Find a job</a></li> <li><a class="c-header__link" href="/authors/index.html" data-track="click" data-track-action="guide to authors" data-track-label="link">Guide to authors</a></li> <li><a class="c-header__link" href="/authors/editorial_policies/" data-track="click" data-track-action="editorial policies" data-track-label="link">Editorial policies</a></li> </ul> </div> </div> <footer class="composite-layer" itemscope itemtype="http://schema.org/Periodical"> <meta itemprop="publisher" 
content="Springer Nature"> <div class="u-mt-16 u-mb-16"> <div class="u-container"> <div class="u-display-flex u-flex-wrap u-justify-content-space-between"> <p class="c-meta u-ma-0 u-flex-shrink"> <span class="c-meta__item"> Scientific Reports (<i>Sci Rep</i>) </span> <span class="c-meta__item"> <abbr title="International Standard Serial Number">ISSN</abbr> <span itemprop="onlineIssn">2045-2322</span> (online) </span> </p> </div> </div> </div> <div class="c-footer"> <div class="u-hide-print" data-track-component="footer"> <h2 class="u-visually-hidden">nature.com sitemap</h2> <div class="c-footer__container"> <div class="c-footer__grid c-footer__group--separator"> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">About Nature Portfolio</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/npg_/company_info/index.html" data-track="click" data-track-action="about us" data-track-label="link">About us</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/npg_/press_room/press_releases.html" data-track="click" data-track-action="press releases" data-track-label="link">Press releases</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://press.nature.com/" data-track="click" data-track-action="press office" data-track-label="link">Press office</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://support.nature.com/support/home" data-track="click" data-track-action="contact us" data-track-label="link">Contact us</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Discover content</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/siteindex" data-track="click" data-track-action="journals a-z" data-track-label="link">Journals A-Z</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/subjects" data-track="click" data-track-action="article by subject" data-track-label="link">Articles by subject</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.protocols.io/" data-track="click" data-track-action="protocols.io" data-track-label="link">protocols.io</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.natureindex.com/" data-track="click" data-track-action="nature index" data-track-label="link">Nature Index</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Publishing policies</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/authors/editorial_policies" data-track="click" data-track-action="Nature portfolio policies" data-track-label="link">Nature portfolio policies</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/nature-research/open-access" data-track="click" data-track-action="open access" data-track-label="link">Open access</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Author &amp; Researcher services</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/reprints" data-track="click" data-track-action="reprints and permissions" data-track-label="link">Reprints &amp; permissions</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.springernature.com/gp/authors/research-data" 
data-track="click" data-track-action="data research service" data-track-label="link">Research data</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://authorservices.springernature.com/language-editing/" data-track="click" data-track-action="language editing" data-track-label="link">Language editing</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://authorservices.springernature.com/scientific-editing/" data-track="click" data-track-action="scientific editing" data-track-label="link">Scientific editing</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://masterclasses.nature.com/" data-track="click" data-track-action="nature masterclasses" data-track-label="link">Nature Masterclasses</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://solutions.springernature.com/" data-track="click" data-track-action="research solutions" data-track-label="link">Research Solutions</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Libraries &amp; institutions</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.springernature.com/gp/librarians/tools-services" data-track="click" data-track-action="librarian service and tools" data-track-label="link">Librarian service &amp; tools</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.springernature.com/gp/librarians/manage-your-account/librarianportal" data-track="click" data-track-action="librarian portal" data-track-label="link">Librarian portal</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/openresearch/about-open-access/information-for-institutions" data-track="click" data-track-action="open research" data-track-label="link">Open research</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.springernature.com/gp/librarians/recommend-to-your-library" data-track="click" data-track-action="Recommend to library" data-track-label="link">Recommend to library</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Advertising &amp; partnerships</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://partnerships.nature.com/product/digital-advertising/" data-track="click" data-track-action="advertising" data-track-label="link">Advertising</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://partnerships.nature.com/" data-track="click" data-track-action="partnerships and services" data-track-label="link">Partnerships &amp; Services</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://partnerships.nature.com/media-kits/" data-track="click" data-track-action="media kits" data-track-label="link">Media kits</a> </li> <li class="c-footer__item"><a class="c-footer__link" href="https://partnerships.nature.com/product/branded-content-native-advertising/" data-track-action="branded content" data-track-label="link">Branded content</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Professional development</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/naturecareers/" data-track="click" data-track-action="nature careers" data-track-label="link">Nature Careers</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://conferences.nature.com" 
data-track="click" data-track-action="nature conferences" data-track-label="link">Nature<span class="u-visually-hidden"> </span> Conferences</a></li> </ul> </div> <div class="c-footer__group"> <h3 class="c-footer__heading u-mt-0">Regional websites</h3> <ul class="c-footer__list"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/natafrica" data-track="click" data-track-action="nature africa" data-track-label="link">Nature Africa</a></li> <li class="c-footer__item"><a class="c-footer__link" href="http://www.naturechina.com" data-track="click" data-track-action="nature china" data-track-label="link">Nature China</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/nindia" data-track="click" data-track-action="nature india" data-track-label="link">Nature India</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/natitaly" data-track="click" data-track-action="nature Italy" data-track-label="link">Nature Italy</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.natureasia.com/ja-jp" data-track="click" data-track-action="nature japan" data-track-label="link">Nature Japan</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/nmiddleeast" data-track="click" data-track-action="nature middle east" data-track-label="link">Nature Middle East</a></li> </ul> </div> </div> </div> <div class="c-footer__container"> <ul class="c-footer__links"> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/info/privacy" data-track="click" data-track-action="privacy policy" data-track-label="link">Privacy Policy</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/info/cookies" data-track="click" data-track-action="use of cookies" data-track-label="link">Use of cookies</a></li> <li class="c-footer__item"> <button class="optanon-toggle-display c-footer__link" onclick="javascript:;" data-cc-action="preferences" data-track="click" data-track-action="manage cookies" data-track-label="link">Your privacy choices/Manage cookies </button> </li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/info/legal-notice" data-track="click" data-track-action="legal notice" data-track-label="link">Legal notice</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/info/accessibility-statement" data-track="click" data-track-action="accessibility statement" data-track-label="link">Accessibility statement</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.nature.com/info/terms-and-conditions" data-track="click" data-track-action="terms and conditions" data-track-label="link">Terms &amp; Conditions</a></li> <li class="c-footer__item"><a class="c-footer__link" href="https://www.springernature.com/ccpa" data-track="click" data-track-action="california privacy statement" data-track-label="link">Your US state privacy rights</a></li> </ul> </div> </div> <div class="c-footer__container"> <a href="https://www.springernature.com/" class="c-footer__link"> <img src="/static/images/logos/sn-logo-white-ea63208b81.svg" alt="Springer Nature" loading="lazy" width="200" height="20"/> </a> <p class="c-footer__legal" data-test="copyright">&copy; 2024 Springer Nature Limited</p> </div> </div> <div class="u-visually-hidden" aria-hidden="true"> <?xml version="1.0" encoding="UTF-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" 
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><defs><path id="a" d="M0 .74h56.72v55.24H0z"/></defs><symbol id="icon-access" viewBox="0 0 18 18"><path d="m14 8c.5522847 0 1 .44771525 1 1v7h2.5c.2761424 0 .5.2238576.5.5v1.5h-18v-1.5c0-.2761424.22385763-.5.5-.5h2.5v-7c0-.55228475.44771525-1 1-1s1 .44771525 1 1v6.9996556h8v-6.9996556c0-.55228475.4477153-1 1-1zm-8 0 2 1v5l-2 1zm6 0v7l-2-1v-5zm-2.42653766-7.59857636 7.03554716 4.92488299c.4162533.29137735.5174853.86502537.226108 1.28127873-.1721584.24594054-.4534847.39241464-.7536934.39241464h-14.16284822c-.50810197 0-.92-.41189803-.92-.92 0-.30020869.1464741-.58153499.39241464-.75369337l7.03554714-4.92488299c.34432015-.2410241.80260453-.2410241 1.14692468 0zm-.57346234 2.03988748-3.65526982 2.55868888h7.31053962z" fill-rule="evenodd"/></symbol><symbol id="icon-account" viewBox="0 0 18 18"><path d="m10.2379028 16.9048051c1.3083556-.2032362 2.5118471-.7235183 3.5294683-1.4798399-.8731327-2.5141501-2.0638925-3.935978-3.7673711-4.3188248v-1.27684611c1.1651924-.41183641 2-1.52307546 2-2.82929429 0-1.65685425-1.3431458-3-3-3-1.65685425 0-3 1.34314575-3 3 0 1.30621883.83480763 2.41745788 2 2.82929429v1.27684611c-1.70347856.3828468-2.89423845 1.8046747-3.76737114 4.3188248 1.01762123.7563216 2.22111275 1.2766037 3.52946833 1.4798399.40563808.0629726.81921174.0951949 1.23790281.0951949s.83226473-.0322223 1.2379028-.0951949zm4.3421782-2.1721994c1.4927655-1.4532925 2.419919-3.484675 2.419919-5.7326057 0-4.418278-3.581722-8-8-8s-8 3.581722-8 8c0 2.2479307.92715352 4.2793132 2.41991895 5.7326057.75688473-2.0164459 1.83949951-3.6071894 3.48926591-4.3218837-1.14534283-.70360829-1.90918486-1.96796271-1.90918486-3.410722 0-2.209139 1.790861-4 4-4s4 1.790861 4 4c0 1.44275929-.763842 2.70711371-1.9091849 3.410722 1.6497664.7146943 2.7323812 2.3054378 3.4892659 4.3218837zm-5.580081 3.2673943c-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9 4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9z" fill-rule="evenodd"/></symbol><symbol id="icon-alert" viewBox="0 0 18 18"><path d="m4 10h2.5c.27614237 0 .5.2238576.5.5s-.22385763.5-.5.5h-3.08578644l-1.12132034 1.1213203c-.18753638.1875364-.29289322.4418903-.29289322.7071068v.1715729h14v-.1715729c0-.2652165-.1053568-.5195704-.2928932-.7071068l-1.7071068-1.7071067v-3.4142136c0-2.76142375-2.2385763-5-5-5-2.76142375 0-5 2.23857625-5 5zm3 4c0 1.1045695.8954305 2 2 2s2-.8954305 2-2zm-5 0c-.55228475 0-1-.4477153-1-1v-.1715729c0-.530433.21071368-1.0391408.58578644-1.4142135l1.41421356-1.4142136v-3c0-3.3137085 2.6862915-6 6-6s6 2.6862915 6 6v3l1.4142136 1.4142136c.3750727.3750727.5857864.8837805.5857864 1.4142135v.1715729c0 .5522847-.4477153 1-1 1h-4c0 1.6568542-1.3431458 3-3 3-1.65685425 0-3-1.3431458-3-3z" fill-rule="evenodd"/></symbol><symbol id="icon-arrow-broad" viewBox="0 0 16 16"><path d="m6.10307866 2.97190702v7.69043288l2.44965196-2.44676915c.38776071-.38730439 1.0088052-.39493524 1.38498697-.01919617.38609051.38563612.38643641 1.01053024-.00013864 1.39665039l-4.12239817 4.11754683c-.38616704.3857126-1.01187344.3861062-1.39846576-.0000311l-4.12258206-4.11773056c-.38618426-.38572979-.39254614-1.00476697-.01636437-1.38050605.38609047-.38563611 1.01018509-.38751562 1.4012233.00306241l2.44985644 2.4469734v-8.67638639c0-.54139983.43698413-.98042709.98493125-.98159081l7.89910522-.0043627c.5451687 0 .9871152.44142642.9871152.98595351s-.4419465.98595351-.9871152.98595351z" fill-rule="evenodd" transform="matrix(-1 0 0 -1 14 
15)"/></symbol><symbol id="icon-arrow-down" viewBox="0 0 16 16"><path d="m3.28337502 11.5302405 4.03074001 4.176208c.37758093.3912076.98937525.3916069 1.367372-.0000316l4.03091977-4.1763942c.3775978-.3912252.3838182-1.0190815.0160006-1.4001736-.3775061-.39113013-.9877245-.39303641-1.3700683.003106l-2.39538585 2.4818345v-11.6147896l-.00649339-.11662112c-.055753-.49733869-.46370161-.88337888-.95867408-.88337888-.49497246 0-.90292107.38604019-.95867408.88337888l-.00649338.11662112v11.6147896l-2.39518594-2.4816273c-.37913917-.39282218-.98637524-.40056175-1.35419292-.0194697-.37750607.3911302-.37784433 1.0249269.00013556 1.4165479z" fill-rule="evenodd"/></symbol><symbol id="icon-arrow-left" viewBox="0 0 16 16"><path d="m4.46975946 3.28337502-4.17620792 4.03074001c-.39120768.37758093-.39160691.98937525.0000316 1.367372l4.1763942 4.03091977c.39122514.3775978 1.01908149.3838182 1.40017357.0160006.39113012-.3775061.3930364-.9877245-.00310603-1.3700683l-2.48183446-2.39538585h11.61478958l.1166211-.00649339c.4973387-.055753.8833789-.46370161.8833789-.95867408 0-.49497246-.3860402-.90292107-.8833789-.95867408l-.1166211-.00649338h-11.61478958l2.4816273-2.39518594c.39282216-.37913917.40056173-.98637524.01946965-1.35419292-.39113012-.37750607-1.02492687-.37784433-1.41654791.00013556z" fill-rule="evenodd"/></symbol><symbol id="icon-arrow-right" viewBox="0 0 16 16"><path d="m11.5302405 12.716625 4.176208-4.03074003c.3912076-.37758093.3916069-.98937525-.0000316-1.367372l-4.1763942-4.03091981c-.3912252-.37759778-1.0190815-.38381821-1.4001736-.01600053-.39113013.37750607-.39303641.98772445.003106 1.37006824l2.4818345 2.39538588h-11.6147896l-.11662112.00649339c-.49733869.055753-.88337888.46370161-.88337888.95867408 0 .49497246.38604019.90292107.88337888.95867408l.11662112.00649338h11.6147896l-2.4816273 2.39518592c-.39282218.3791392-.40056175.9863753-.0194697 1.3541929.3911302.3775061 1.0249269.3778444 1.4165479-.0001355z" fill-rule="evenodd"/></symbol><symbol id="icon-arrow-sub" viewBox="0 0 16 16"><path d="m7.89692134 4.97190702v7.69043288l-2.44965196-2.4467692c-.38776071-.38730434-1.0088052-.39493519-1.38498697-.0191961-.38609047.3856361-.38643643 1.0105302.00013864 1.3966504l4.12239817 4.1175468c.38616704.3857126 1.01187344.3861062 1.39846576-.0000311l4.12258202-4.1177306c.3861843-.3857298.3925462-1.0047669.0163644-1.380506-.3860905-.38563612-1.0101851-.38751563-1.4012233.0030624l-2.44985643 2.4469734v-8.67638639c0-.54139983-.43698413-.98042709-.98493125-.98159081l-7.89910525-.0043627c-.54516866 0-.98711517.44142642-.98711517.98595351s.44194651.98595351.98711517.98595351z" fill-rule="evenodd"/></symbol><symbol id="icon-arrow-up" viewBox="0 0 16 16"><path d="m12.716625 4.46975946-4.03074003-4.17620792c-.37758093-.39120768-.98937525-.39160691-1.367372.0000316l-4.03091981 4.1763942c-.37759778.39122514-.38381821 1.01908149-.01600053 1.40017357.37750607.39113012.98772445.3930364 1.37006824-.00310603l2.39538588-2.48183446v11.61478958l.00649339.1166211c.055753.4973387.46370161.8833789.95867408.8833789.49497246 0 .90292107-.3860402.95867408-.8833789l.00649338-.1166211v-11.61478958l2.39518592 2.4816273c.3791392.39282216.9863753.40056173 1.3541929.01946965.3775061-.39113012.3778444-1.02492687-.0001355-1.41654791z" fill-rule="evenodd"/></symbol><symbol id="icon-article" viewBox="0 0 18 18"><path d="m13 15v-12.9906311c0-.0073595-.0019884-.0093689.0014977-.0093689l-11.00158888.00087166v13.00506804c0 .5482678.44615281.9940603.99415146.9940603h10.27350412c-.1701701-.2941734-.2675644-.6357129-.2675644-1zm-12 
.0059397v-13.00506804c0-.5562408.44704472-1.00087166.99850233-1.00087166h11.00299537c.5510129 0 .9985023.45190985.9985023 1.0093689v2.9906311h3v9.9914698c0 1.1065798-.8927712 2.0085302-1.9940603 2.0085302h-12.01187942c-1.09954652 0-1.99406028-.8927712-1.99406028-1.9940603zm13-9.0059397v9c0 .5522847.4477153 1 1 1s1-.4477153 1-1v-9zm-10-2h7v4h-7zm1 1v2h5v-2zm-1 4h7v1h-7zm0 2h7v1h-7zm0 2h7v1h-7z" fill-rule="evenodd"/></symbol><symbol id="icon-audio" viewBox="0 0 18 18"><path d="m13.0957477 13.5588459c-.195279.1937043-.5119137.193729-.7072234.0000551-.1953098-.193674-.1953346-.5077061-.0000556-.7014104 1.0251004-1.0168342 1.6108711-2.3905226 1.6108711-3.85745208 0-1.46604976-.5850634-2.83898246-1.6090736-3.85566829-.1951894-.19379323-.1950192-.50782531.0003802-.70141028.1953993-.19358497.512034-.19341614.7072234.00037709 1.2094886 1.20083761 1.901635 2.8250555 1.901635 4.55670148 0 1.73268608-.6929822 3.35779608-1.9037571 4.55880738zm2.1233994 2.1025159c-.195234.193749-.5118687.1938462-.7072235.0002171-.1953548-.1936292-.1954528-.5076613-.0002189-.7014104 1.5832215-1.5711805 2.4881302-3.6939808 2.4881302-5.96012998 0-2.26581266-.9046382-4.3883241-2.487443-5.95944795-.1952117-.19377107-.1950777-.50780316.0002993-.70141031s.5120117-.19347426.7072234.00029682c1.7683321 1.75528196 2.7800854 4.12911258 2.7800854 6.66056144 0 2.53182498-1.0120556 4.90597838-2.7808529 6.66132328zm-14.21898205-3.6854911c-.5523759 0-1.00016505-.4441085-1.00016505-.991944v-3.96777631c0-.54783558.44778915-.99194407 1.00016505-.99194407h2.0003301l5.41965617-3.8393633c.44948677-.31842296 1.07413994-.21516983 1.39520191.23062232.12116339.16823446.18629727.36981184.18629727.57655577v12.01603479c0 .5478356-.44778914.9919441-1.00016505.9919441-.20845738 0-.41170538-.0645985-.58133413-.184766l-5.41965617-3.8393633zm0-.991944h2.32084805l5.68047235 4.0241292v-12.01603479l-5.68047235 4.02412928h-2.32084805z" fill-rule="evenodd"/></symbol><symbol id="icon-block" viewBox="0 0 24 24"><path d="m0 0h24v24h-24z" fill-rule="evenodd"/></symbol><symbol id="icon-book" viewBox="0 0 18 18"><path d="m4 13v-11h1v11h11v-11h-13c-.55228475 0-1 .44771525-1 1v10.2675644c.29417337-.1701701.63571286-.2675644 1-.2675644zm12 1h-13c-.55228475 0-1 .4477153-1 1s.44771525 1 1 1h13zm0 3h-13c-1.1045695 0-2-.8954305-2-2v-12c0-1.1045695.8954305-2 2-2h13c.5522847 0 1 .44771525 1 1v14c0 .5522847-.4477153 1-1 1zm-8.5-13h6c.2761424 0 .5.22385763.5.5s-.2238576.5-.5.5h-6c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5zm1 2h4c.2761424 0 .5.22385763.5.5s-.2238576.5-.5.5h-4c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5z" fill-rule="evenodd"/></symbol><symbol id="icon-broad" viewBox="0 0 24 24"><path d="m9.18274226 7.81v7.7999954l2.48162734-2.4816273c.3928221-.3928221 1.0219731-.4005617 1.4030652-.0194696.3911301.3911301.3914806 1.0249268-.0001404 1.4165479l-4.17620796 4.1762079c-.39120769.3912077-1.02508144.3916069-1.41671995-.0000316l-4.1763942-4.1763942c-.39122514-.3912251-.39767006-1.0190815-.01657798-1.4001736.39113012-.3911301 1.02337106-.3930364 1.41951349.0031061l2.48183446 2.4818344v-8.7999954c0-.54911294.4426881-.99439484.99778758-.99557515l8.00221246-.00442485c.5522847 0 1 .44771525 1 1s-.4477153 1-1 1z" fill-rule="evenodd" transform="matrix(-1 0 0 -1 20.182742 24.805206)"/></symbol><symbol id="icon-calendar" viewBox="0 0 18 18"><path d="m12.5 0c.2761424 0 .5.21505737.5.49047852v.50952148h2c1.1072288 0 2 .89451376 2 2v12c0 1.1072288-.8945138 2-2 2h-12c-1.1072288 0-2-.8945138-2-2v-12c0-1.1072288.89451376-2 2-2h1v1h-1c-.55393837 0-1 .44579254-1 
1v3h14v-3c0-.55393837-.4457925-1-1-1h-2v1.50952148c0 .27088381-.2319336.49047852-.5.49047852-.2761424 0-.5-.21505737-.5-.49047852v-3.01904296c0-.27088381.2319336-.49047852.5-.49047852zm3.5 7h-14v8c0 .5539384.44579254 1 1 1h12c.5539384 0 1-.4457925 1-1zm-11 6v1h-1v-1zm3 0v1h-1v-1zm3 0v1h-1v-1zm-6-2v1h-1v-1zm3 0v1h-1v-1zm6 0v1h-1v-1zm-3 0v1h-1v-1zm-3-2v1h-1v-1zm6 0v1h-1v-1zm-3 0v1h-1v-1zm-5.5-9c.27614237 0 .5.21505737.5.49047852v.50952148h5v1h-5v1.50952148c0 .27088381-.23193359.49047852-.5.49047852-.27614237 0-.5-.21505737-.5-.49047852v-3.01904296c0-.27088381.23193359-.49047852.5-.49047852z" fill-rule="evenodd"/></symbol><symbol id="icon-cart" viewBox="0 0 18 18"><path d="m5 14c1.1045695 0 2 .8954305 2 2s-.8954305 2-2 2-2-.8954305-2-2 .8954305-2 2-2zm10 0c1.1045695 0 2 .8954305 2 2s-.8954305 2-2 2-2-.8954305-2-2 .8954305-2 2-2zm-10 1c-.55228475 0-1 .4477153-1 1s.44771525 1 1 1 1-.4477153 1-1-.44771525-1-1-1zm10 0c-.5522847 0-1 .4477153-1 1s.4477153 1 1 1 1-.4477153 1-1-.4477153-1-1-1zm-12.82032249-15c.47691417 0 .88746157.33678127.98070211.80449199l.23823144 1.19501025 13.36277974.00045554c.5522847.00001882.9999659.44774934.9999659 1.00004222 0 .07084994-.0075361.14150708-.022474.2107727l-1.2908094 5.98534344c-.1007861.46742419-.5432548.80388386-1.0571651.80388386h-10.24805106c-.59173366 0-1.07142857.4477153-1.07142857 1 0 .5128358.41361449.9355072.94647737.9932723l.1249512.0067277h10.35933776c.2749512 0 .4979349.2228539.4979349.4978051 0 .2749417-.2227336.4978951-.4976753.4980063l-10.35959736.0041886c-1.18346732 0-2.14285714-.8954305-2.14285714-2 0-.6625717.34520317-1.24989198.87690425-1.61383592l-1.63768102-8.19004794c-.01312273-.06561364-.01950005-.131011-.0196107-.19547395l-1.71961253-.00064219c-.27614237 0-.5-.22385762-.5-.5 0-.27614237.22385763-.5.5-.5zm14.53193359 2.99950224h-13.11300004l1.20580469 6.02530174c.11024034-.0163252.22327998-.02480398.33844139-.02480398h10.27064786z"/></symbol><symbol id="icon-chevron-less" viewBox="0 0 10 10"><path d="m5.58578644 4-3.29289322-3.29289322c-.39052429-.39052429-.39052429-1.02368927 0-1.41421356s1.02368927-.39052429 1.41421356 0l4 4c.39052429.39052429.39052429 1.02368927 0 1.41421356l-4 4c-.39052429.39052429-1.02368927.39052429-1.41421356 0s-.39052429-1.02368927 0-1.41421356z" fill-rule="evenodd" transform="matrix(0 -1 -1 0 9 9)"/></symbol><symbol id="icon-chevron-more" viewBox="0 0 10 10"><path d="m5.58578644 6-3.29289322-3.29289322c-.39052429-.39052429-.39052429-1.02368927 0-1.41421356s1.02368927-.39052429 1.41421356 0l4 4c.39052429.39052429.39052429 1.02368927 0 1.41421356l-4 4.00000002c-.39052429.3905243-1.02368927.3905243-1.41421356 0s-.39052429-1.02368929 0-1.41421358z" fill-rule="evenodd" transform="matrix(0 1 -1 0 11 1)"/></symbol><symbol id="icon-chevron-right" viewBox="0 0 10 10"><path d="m5.96738168 4.70639573 2.39518594-2.41447274c.37913917-.38219212.98637524-.38972225 1.35419292-.01894278.37750606.38054586.37784436.99719163-.00013556 1.37821513l-4.03074001 4.06319683c-.37758093.38062133-.98937525.38100976-1.367372-.00003075l-4.03091981-4.06337806c-.37759778-.38063832-.38381821-.99150444-.01600053-1.3622839.37750607-.38054587.98772445-.38240057 1.37006824.00302197l2.39538588 2.4146743.96295325.98624457z" fill-rule="evenodd" transform="matrix(0 -1 1 0 0 10)"/></symbol><symbol id="icon-circle-fill" viewBox="0 0 16 16"><path d="m8 14c-3.3137085 0-6-2.6862915-6-6s2.6862915-6 6-6 6 2.6862915 6 6-2.6862915 6-6 6z" fill-rule="evenodd"/></symbol><symbol id="icon-circle" viewBox="0 0 16 16"><path d="m8 12c2.209139 0 4-1.790861 
4-4s-1.790861-4-4-4-4 1.790861-4 4 1.790861 4 4 4zm0 2c-3.3137085 0-6-2.6862915-6-6s2.6862915-6 6-6 6 2.6862915 6 6-2.6862915 6-6 6z" fill-rule="evenodd"/></symbol><symbol id="icon-citation" viewBox="0 0 18 18"><path d="m8.63593473 5.99995183c2.20913897 0 3.99999997 1.79084375 3.99999997 3.99996146 0 1.40730761-.7267788 2.64486871-1.8254829 3.35783281 1.6240224.6764218 2.8754442 2.0093871 3.4610603 3.6412466l-1.0763845.000006c-.5310008-1.2078237-1.5108121-2.1940153-2.7691712-2.7181346l-.79002167-.329052v-1.023992l.63016577-.4089232c.8482885-.5504661 1.3698342-1.4895187 1.3698342-2.51898361 0-1.65683828-1.3431457-2.99996146-2.99999997-2.99996146-1.65685425 0-3 1.34312318-3 2.99996146 0 1.02946491.52154569 1.96851751 1.36983419 2.51898361l.63016581.4089232v1.023992l-.79002171.329052c-1.25835905.5241193-2.23817037 1.5103109-2.76917113 2.7181346l-1.07638453-.000006c.58561612-1.6318595 1.8370379-2.9648248 3.46106024-3.6412466-1.09870405-.7129641-1.82548287-1.9505252-1.82548287-3.35783281 0-2.20911771 1.790861-3.99996146 4-3.99996146zm7.36897597-4.99995183c1.1018574 0 1.9950893.89353404 1.9950893 2.00274083v5.994422c0 1.10608317-.8926228 2.00274087-1.9950893 2.00274087l-3.0049107-.0009037v-1l3.0049107.00091329c.5490631 0 .9950893-.44783123.9950893-1.00275046v-5.994422c0-.55646537-.4450595-1.00275046-.9950893-1.00275046h-14.00982141c-.54906309 0-.99508929.44783123-.99508929 1.00275046v5.9971821c0 .66666024.33333333.99999036 1 .99999036l2-.00091329v1l-2 .0009037c-1 0-2-.99999041-2-1.99998077v-5.9971821c0-1.10608322.8926228-2.00274083 1.99508929-2.00274083zm-8.5049107 2.9999711c.27614237 0 .5.22385547.5.5 0 .2761349-.22385763.5-.5.5h-4c-.27614237 0-.5-.2238651-.5-.5 0-.27614453.22385763-.5.5-.5zm3 0c.2761424 0 .5.22385547.5.5 0 .2761349-.2238576.5-.5.5h-1c-.27614237 0-.5-.2238651-.5-.5 0-.27614453.22385763-.5.5-.5zm4 0c.2761424 0 .5.22385547.5.5 0 .2761349-.2238576.5-.5.5h-2c-.2761424 0-.5-.2238651-.5-.5 0-.27614453.2238576-.5.5-.5z" fill-rule="evenodd"/></symbol><symbol id="icon-close" viewBox="0 0 16 16"><path d="m2.29679575 12.2772478c-.39658757.3965876-.39438847 1.0328109-.00062148 1.4265779.39651227.3965123 1.03246768.3934888 1.42657791-.0006214l4.27724782-4.27724787 4.2772478 4.27724787c.3965876.3965875 1.0328109.3943884 1.4265779.0006214.3965123-.3965122.3934888-1.0324677-.0006214-1.4265779l-4.27724787-4.2772478 4.27724787-4.27724782c.3965875-.39658757.3943884-1.03281091.0006214-1.42657791-.3965122-.39651226-1.0324677-.39348875-1.4265779.00062148l-4.2772478 4.27724782-4.27724782-4.27724782c-.39658757-.39658757-1.03281091-.39438847-1.42657791-.00062148-.39651226.39651227-.39348875 1.03246768.00062148 1.42657791l4.27724782 4.27724782z" fill-rule="evenodd"/></symbol><symbol id="icon-collections" viewBox="0 0 18 18"><path d="m15 4c1.1045695 0 2 .8954305 2 2v9c0 1.1045695-.8954305 2-2 2h-8c-1.1045695 0-2-.8954305-2-2h1c0 .5128358.38604019.9355072.88337887.9932723l.11662113.0067277h8c.5128358 0 .9355072-.3860402.9932723-.8833789l.0067277-.1166211v-9c0-.51283584-.3860402-.93550716-.8833789-.99327227l-.1166211-.00672773h-1v-1zm-4-3c1.1045695 0 2 .8954305 2 2v9c0 1.1045695-.8954305 2-2 2h-8c-1.1045695 0-2-.8954305-2-2v-9c0-1.1045695.8954305-2 2-2zm0 1h-8c-.51283584 0-.93550716.38604019-.99327227.88337887l-.00672773.11662113v9c0 .5128358.38604019.9355072.88337887.9932723l.11662113.0067277h8c.5128358 0 .9355072-.3860402.9932723-.8833789l.0067277-.1166211v-9c0-.51283584-.3860402-.93550716-.8833789-.99327227zm-1.5 7c.27614237 0 .5.22385763.5.5s-.22385763.5-.5.5h-5c-.27614237 
0-.5-.22385763-.5-.5s.22385763-.5.5-.5zm0-2c.27614237 0 .5.22385763.5.5s-.22385763.5-.5.5h-5c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5zm0-2c.27614237 0 .5.22385763.5.5s-.22385763.5-.5.5h-5c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5z" fill-rule="evenodd"/></symbol><symbol id="icon-compare" viewBox="0 0 18 18"><path d="m12 3c3.3137085 0 6 2.6862915 6 6s-2.6862915 6-6 6c-1.0928452 0-2.11744941-.2921742-2.99996061-.8026704-.88181407.5102749-1.90678042.8026704-3.00003939.8026704-3.3137085 0-6-2.6862915-6-6s2.6862915-6 6-6c1.09325897 0 2.11822532.29239547 3.00096303.80325037.88158756-.51107621 1.90619177-.80325037 2.99903697-.80325037zm-6 1c-2.76142375 0-5 2.23857625-5 5 0 2.7614237 2.23857625 5 5 5 .74397391 0 1.44999672-.162488 2.08451611-.4539116-1.27652344-1.1000812-2.08451611-2.7287264-2.08451611-4.5460884s.80799267-3.44600721 2.08434391-4.5463015c-.63434719-.29121054-1.34037-.4536985-2.08434391-.4536985zm6 0c-.7439739 0-1.4499967.16248796-2.08451611.45391156 1.27652341 1.10008123 2.08451611 2.72872644 2.08451611 4.54608844s-.8079927 3.4460072-2.08434391 4.5463015c.63434721.2912105 1.34037001.4536985 2.08434391.4536985 2.7614237 0 5-2.2385763 5-5 0-2.76142375-2.2385763-5-5-5zm-1.4162763 7.0005324h-3.16744736c.15614659.3572676.35283837.6927622.58425872 1.0006671h1.99892988c.23142036-.3079049.42811216-.6433995.58425876-1.0006671zm.4162763-2.0005324h-4c0 .34288501.0345146.67770871.10025909 1.0011864h3.79948181c.0657445-.32347769.1002591-.65830139.1002591-1.0011864zm-.4158423-1.99953894h-3.16831543c-.13859957.31730812-.24521946.651783-.31578599.99935097h3.79988742c-.0705665-.34756797-.1771864-.68204285-.315786-.99935097zm-1.58295822-1.999926-.08316107.06199199c-.34550042.27081213-.65446126.58611297-.91825862.93727862h2.00044041c-.28418626-.37830727-.6207872-.71499149-.99902072-.99927061z" fill-rule="evenodd"/></symbol><symbol id="icon-download-file" viewBox="0 0 18 18"><path d="m10.0046024 0c.5497429 0 1.3179837.32258606 1.707238.71184039l4.5763192 4.57631922c.3931386.39313859.7118404 1.16760135.7118404 1.71431368v8.98899651c0 1.1092806-.8945138 2.0085302-1.9940603 2.0085302h-12.01187942c-1.10128908 0-1.99406028-.8926228-1.99406028-1.9950893v-14.00982141c0-1.10185739.88743329-1.99508929 1.99961498-1.99508929zm0 1h-7.00498742c-.55709576 0-.99961498.44271433-.99961498.99508929v14.00982141c0 .5500396.44491393.9950893.99406028.9950893h12.01187942c.5463747 0 .9940603-.4506622.9940603-1.0085302v-8.98899651c0-.28393444-.2150684-.80332809-.4189472-1.0072069l-4.5763192-4.57631922c-.2038461-.20384606-.718603-.41894717-1.0001312-.41894717zm-1.5046024 4c.27614237 0 .5.21637201.5.49209595v6.14827645l1.7462789-1.77990922c.1933927-.1971171.5125222-.19455839.7001689-.0069117.1932998.19329992.1910058.50899492-.0027774.70277812l-2.59089271 2.5908927c-.19483374.1948337-.51177825.1937771-.70556873-.0000133l-2.59099079-2.5909908c-.19484111-.1948411-.19043735-.5151448-.00279066-.70279146.19329987-.19329987.50465175-.19237083.70018565.00692852l1.74638684 1.78001764v-6.14827695c0-.27177709.23193359-.49209595.5-.49209595z" fill-rule="evenodd"/></symbol><symbol id="icon-download" viewBox="0 0 16 16"><path d="m12.9975267 12.999368c.5467123 0 1.0024733.4478567 1.0024733 1.000316 0 .5563109-.4488226 1.000316-1.0024733 1.000316h-9.99505341c-.54671233 0-1.00247329-.4478567-1.00247329-1.000316 0-.5563109.44882258-1.000316 1.00247329-1.000316zm-4.9975267-11.999368c.55228475 0 1 .44497754 1 .99589209v6.80214418l2.4816273-2.48241149c.3928222-.39294628 1.0219732-.4006883 1.4030652-.01947579.3911302.39125371.3914806 
1.02525073-.0001404 1.41699553l-4.17620792 4.17752758c-.39120769.3913313-1.02508144.3917306-1.41671995-.0000316l-4.17639421-4.17771394c-.39122513-.39134876-.39767006-1.01940351-.01657797-1.40061601.39113012-.39125372 1.02337105-.3931606 1.41951349.00310701l2.48183446 2.48261871v-6.80214418c0-.55001601.44386482-.99589209 1-.99589209z" fill-rule="evenodd"/></symbol><symbol id="icon-editors" viewBox="0 0 18 18"><path d="m8.72592184 2.54588137c-.48811714-.34391207-1.08343326-.54588137-1.72592184-.54588137-1.65685425 0-3 1.34314575-3 3 0 1.02947485.5215457 1.96853646 1.3698342 2.51900785l.6301658.40892721v1.02400182l-.79002171.32905522c-1.93395773.8055207-3.20997829 2.7024791-3.20997829 4.8180274v.9009805h-1v-.9009805c0-2.5479714 1.54557359-4.79153984 3.82548288-5.7411543-1.09870406-.71297106-1.82548288-1.95054399-1.82548288-3.3578652 0-2.209139 1.790861-4 4-4 1.09079823 0 2.07961816.43662103 2.80122451 1.1446278-.37707584.09278571-.7373238.22835063-1.07530267.40125357zm-2.72592184 14.45411863h-1v-.9009805c0-2.5479714 1.54557359-4.7915398 3.82548288-5.7411543-1.09870406-.71297106-1.82548288-1.95054399-1.82548288-3.3578652 0-2.209139 1.790861-4 4-4s4 1.790861 4 4c0 1.40732121-.7267788 2.64489414-1.8254829 3.3578652 2.2799093.9496145 3.8254829 3.1931829 3.8254829 5.7411543v.9009805h-1v-.9009805c0-2.1155483-1.2760206-4.0125067-3.2099783-4.8180274l-.7900217-.3290552v-1.02400184l.6301658-.40892721c.8482885-.55047139 1.3698342-1.489533 1.3698342-2.51900785 0-1.65685425-1.3431458-3-3-3-1.65685425 0-3 1.34314575-3 3 0 1.02947485.5215457 1.96853646 1.3698342 2.51900785l.6301658.40892721v1.02400184l-.79002171.3290552c-1.93395773.8055207-3.20997829 2.7024791-3.20997829 4.8180274z" fill-rule="evenodd"/></symbol><symbol id="icon-email" viewBox="0 0 18 18"><path d="m16.0049107 2c1.1018574 0 1.9950893.89706013 1.9950893 2.00585866v9.98828264c0 1.1078052-.8926228 2.0058587-1.9950893 2.0058587h-14.00982141c-1.10185739 0-1.99508929-.8970601-1.99508929-2.0058587v-9.98828264c0-1.10780515.8926228-2.00585866 1.99508929-2.00585866zm0 1h-14.00982141c-.54871518 0-.99508929.44887827-.99508929 1.00585866v9.98828264c0 .5572961.44630695 1.0058587.99508929 1.0058587h14.00982141c.5487152 0 .9950893-.4488783.9950893-1.0058587v-9.98828264c0-.55729607-.446307-1.00585866-.9950893-1.00585866zm-.0049107 2.55749512v1.44250488l-7 4-7-4v-1.44250488l7 4z" fill-rule="evenodd"/></symbol><symbol id="icon-error" viewBox="0 0 18 18"><path d="m9 0c4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9zm2.8630343 4.71100931-2.8630343 2.86303426-2.86303426-2.86303426c-.39658757-.39658757-1.03281091-.39438847-1.4265779-.00062147-.39651227.39651226-.39348876 1.03246767.00062147 1.4265779l2.86303426 2.86303426-2.86303426 2.8630343c-.39658757.3965875-.39438847 1.0328109-.00062147 1.4265779.39651226.3965122 1.03246767.3934887 1.4265779-.0006215l2.86303426-2.8630343 2.8630343 2.8630343c.3965875.3965876 1.0328109.3943885 1.4265779.0006215.3965122-.3965123.3934887-1.0324677-.0006215-1.4265779l-2.8630343-2.8630343 2.8630343-2.86303426c.3965876-.39658757.3943885-1.03281091.0006215-1.4265779-.3965123-.39651227-1.0324677-.39348876-1.4265779.00062147z" fill-rule="evenodd"/></symbol><symbol id="icon-ethics" viewBox="0 0 18 18"><path d="m6.76384967 1.41421356.83301651-.8330165c.77492941-.77492941 2.03133823-.77492941 2.80626762 0l.8330165.8330165c.3750728.37507276.8837806.58578644 1.4142136.58578644h1.3496361c1.1045695 0 2 .8954305 2 2v1.34963611c0 .53043298.2107137 1.03914081.5857864 
1.41421356l.8330165.83301651c.7749295.77492941.7749295 2.03133823 0 2.80626762l-.8330165.8330165c-.3750727.3750728-.5857864.8837806-.5857864 1.4142136v1.3496361c0 1.1045695-.8954305 2-2 2h-1.3496361c-.530433 0-1.0391408.2107137-1.4142136.5857864l-.8330165.8330165c-.77492939.7749295-2.03133821.7749295-2.80626762 0l-.83301651-.8330165c-.37507275-.3750727-.88378058-.5857864-1.41421356-.5857864h-1.34963611c-1.1045695 0-2-.8954305-2-2v-1.3496361c0-.530433-.21071368-1.0391408-.58578644-1.4142136l-.8330165-.8330165c-.77492941-.77492939-.77492941-2.03133821 0-2.80626762l.8330165-.83301651c.37507276-.37507275.58578644-.88378058.58578644-1.41421356v-1.34963611c0-1.1045695.8954305-2 2-2h1.34963611c.53043298 0 1.03914081-.21071368 1.41421356-.58578644zm-1.41421356 1.58578644h-1.34963611c-.55228475 0-1 .44771525-1 1v1.34963611c0 .79564947-.31607052 1.55871121-.87867966 2.12132034l-.8330165.83301651c-.38440512.38440512-.38440512 1.00764896 0 1.39205408l.8330165.83301646c.56260914.5626092.87867966 1.3256709.87867966 2.1213204v1.3496361c0 .5522847.44771525 1 1 1h1.34963611c.79564947 0 1.55871121.3160705 2.12132034.8786797l.83301651.8330165c.38440512.3844051 1.00764896.3844051 1.39205408 0l.83301646-.8330165c.5626092-.5626092 1.3256709-.8786797 2.1213204-.8786797h1.3496361c.5522847 0 1-.4477153 1-1v-1.3496361c0-.7956495.3160705-1.5587112.8786797-2.1213204l.8330165-.83301646c.3844051-.38440512.3844051-1.00764896 0-1.39205408l-.8330165-.83301651c-.5626092-.56260913-.8786797-1.32567087-.8786797-2.12132034v-1.34963611c0-.55228475-.4477153-1-1-1h-1.3496361c-.7956495 0-1.5587112-.31607052-2.1213204-.87867966l-.83301646-.8330165c-.38440512-.38440512-1.00764896-.38440512-1.39205408 0l-.83301651.8330165c-.56260913.56260914-1.32567087.87867966-2.12132034.87867966zm3.58698944 11.4960218c-.02081224.002155-.04199226.0030286-.06345763.002542-.98766446-.0223875-1.93408568-.3063547-2.75885125-.8155622-.23496767-.1450683-.30784554-.4531483-.16277726-.688116.14506827-.2349677.45314827-.3078455.68811595-.1627773.67447084.4164161 1.44758575.6483839 2.25617384.6667123.01759529.0003988.03495764.0017019.05204365.0038639.01713363-.0017748.03452416-.0026845.05212715-.0026845 2.4852814 0 4.5-2.0147186 4.5-4.5 0-1.04888973-.3593547-2.04134635-1.0074477-2.83787157-.1742817-.21419731-.1419238-.5291218.0722736-.70340353.2141973-.17428173.5291218-.14192375.7034035.07227357.7919032.97327203 1.2317706 2.18808682 1.2317706 3.46900153 0 3.0375661-2.4624339 5.5-5.5 5.5-.02146768 0-.04261937-.0013529-.06337445-.0039782zm1.57975095-10.78419583c.2654788.07599731.419084.35281842.3430867.61829728-.0759973.26547885-.3528185.419084-.6182973.3430867-.37560116-.10752146-.76586237-.16587951-1.15568824-.17249193-2.5587807-.00064534-4.58547766 2.00216524-4.58547766 4.49928198 0 .62691557.12797645 1.23496.37274865 1.7964426.11035133.2531347-.0053975.5477984-.25853224.6581497-.25313473.1103514-.54779841-.0053975-.65814974-.2585322-.29947131-.6869568-.45606667-1.43097603-.45606667-2.1960601 0-3.05211432 2.47714695-5.50006595 5.59399617-5.49921198.48576182.00815502.96289603.0795037 1.42238033.21103795zm-1.9766658 6.41091303 2.69835-2.94655317c.1788432-.21040373.4943901-.23598862.7047939-.05714545.2104037.17884318.2359886.49439014.0571454.70479387l-3.01637681 3.34277395c-.18039088.1999106-.48669547.2210637-.69285412.0478478l-1.93095347-1.62240047c-.21213845-.17678204-.24080048-.49206439-.06401844-.70420284.17678204-.21213844.49206439-.24080048.70420284-.06401844z" fill-rule="evenodd"/></symbol><symbol id="icon-expand"><path d="M7.498 11.918a.997.997 0 0 
0-.003-1.411.995.995 0 0 0-1.412-.003l-4.102 4.102v-3.51A1 1 0 0 0 .98 10.09.992.992 0 0 0 0 11.092V17c0 .554.448 1.002 1.002 1.002h5.907c.554 0 1.002-.45 1.002-1.003 0-.539-.45-.978-1.006-.978h-3.51zm3.005-5.835a.997.997 0 0 0 .003 1.412.995.995 0 0 0 1.411.003l4.103-4.103v3.51a1 1 0 0 0 1.001 1.006A.992.992 0 0 0 18 6.91V1.002A1 1 0 0 0 17 0h-5.907a1.003 1.003 0 0 0-1.002 1.003c0 .539.45.978 1.006.978h3.51z" fill-rule="evenodd"/></symbol><symbol id="icon-explore" viewBox="0 0 18 18"><path d="m9 17c4.418278 0 8-3.581722 8-8s-3.581722-8-8-8-8 3.581722-8 8 3.581722 8 8 8zm0 1c-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9 4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9zm0-2.5c-.27614237 0-.5-.2238576-.5-.5s.22385763-.5.5-.5c2.969509 0 5.400504-2.3575119 5.497023-5.31714844.0090007-.27599565.2400359-.49243782.5160315-.48343711.2759957.0090007.4924378.2400359.4834371.51603155-.114093 3.4985237-2.9869632 6.284554-6.4964916 6.284554zm-.29090657-12.99359748c.27587424-.01216621.50937715.20161139.52154336.47748563.01216621.27587423-.20161139.50937715-.47748563.52154336-2.93195733.12930094-5.25315116 2.54886451-5.25315116 5.49456849 0 .27614237-.22385763.5-.5.5s-.5-.22385763-.5-.5c0-3.48142406 2.74307146-6.34074398 6.20909343-6.49359748zm1.13784138 8.04763908-1.2004882-1.20048821c-.19526215-.19526215-.19526215-.51184463 0-.70710678s.51184463-.19526215.70710678 0l1.20048821 1.2004882 1.6006509-4.00162734-4.50670359 1.80268144-1.80268144 4.50670359zm4.10281269-6.50378907-2.6692597 6.67314927c-.1016411.2541026-.3029834.4554449-.557086.557086l-6.67314927 2.6692597 2.66925969-6.67314926c.10164107-.25410266.30298336-.45544495.55708602-.55708602z" fill-rule="evenodd"/></symbol><symbol id="icon-filter" viewBox="0 0 16 16"><path d="m14.9738641 0c.5667192 0 1.0261359.4477136 1.0261359 1 0 .24221858-.0902161.47620768-.2538899.65849851l-5.6938314 6.34147206v5.49997973c0 .3147562-.1520673.6111434-.4104543.7999971l-2.05227171 1.4999945c-.45337535.3313696-1.09655869.2418269-1.4365902-.1999993-.13321514-.1730955-.20522717-.3836284-.20522717-.5999978v-6.99997423l-5.69383133-6.34147206c-.3731872-.41563511-.32996891-1.0473954.09653074-1.41107611.18705584-.15950448.42716133-.2474224.67571519-.2474224zm-5.9218641 8.5h-2.105v6.491l.01238459.0070843.02053271.0015705.01955278-.0070558 2.0532976-1.4990996zm-8.02585008-7.5-.01564945.00240169 5.83249953 6.49759831h2.313l5.836-6.499z"/></symbol><symbol id="icon-home" viewBox="0 0 18 18"><path d="m9 5-6 6v5h4v-4h4v4h4v-5zm7 6.5857864v4.4142136c0 .5522847-.4477153 1-1 1h-5v-4h-2v4h-5c-.55228475 0-1-.4477153-1-1v-4.4142136c-.25592232 0-.51184464-.097631-.70710678-.2928932l-.58578644-.5857864c-.39052429-.3905243-.39052429-1.02368929 0-1.41421358l8.29289322-8.29289322 8.2928932 8.29289322c.3905243.39052429.3905243 1.02368928 0 1.41421358l-.5857864.5857864c-.1952622.1952622-.4511845.2928932-.7071068.2928932zm-7-9.17157284-7.58578644 7.58578644.58578644.5857864 7-6.99999996 7 6.99999996.5857864-.5857864z" fill-rule="evenodd"/></symbol><symbol id="icon-image" viewBox="0 0 18 18"><path d="m10.0046024 0c.5497429 0 1.3179837.32258606 1.707238.71184039l4.5763192 4.57631922c.3931386.39313859.7118404 1.16760135.7118404 1.71431368v8.98899651c0 1.1092806-.8945138 2.0085302-1.9940603 2.0085302h-12.01187942c-1.10128908 0-1.99406028-.8926228-1.99406028-1.9950893v-14.00982141c0-1.10185739.88743329-1.99508929 1.99961498-1.99508929zm-3.49645283 10.1752453-3.89407257 6.7495552c.11705545.048464.24538859.0751995.37998328.0751995h10.60290092l-2.4329715-4.2154691-1.57494129 
2.7288098zm8.49779013 6.8247547c.5463747 0 .9940603-.4506622.9940603-1.0085302v-8.98899651c0-.28393444-.2150684-.80332809-.4189472-1.0072069l-4.5763192-4.57631922c-.2038461-.20384606-.718603-.41894717-1.0001312-.41894717h-7.00498742c-.55709576 0-.99961498.44271433-.99961498.99508929v13.98991071l4.50814957-7.81026689 3.08089884 5.33809539 1.57494129-2.7288097 3.5875735 6.2159812zm-3.0059397-11c1.1045695 0 2 .8954305 2 2s-.8954305 2-2 2-2-.8954305-2-2 .8954305-2 2-2zm0 1c-.5522847 0-1 .44771525-1 1s.4477153 1 1 1 1-.44771525 1-1-.4477153-1-1-1z" fill-rule="evenodd"/></symbol><symbol id="icon-info" viewBox="0 0 18 18"><path d="m9 0c4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9zm0 7h-1.5l-.11662113.00672773c-.49733868.05776511-.88337887.48043643-.88337887.99327227 0 .47338693.32893365.86994729.77070917.97358929l.1126697.01968298.11662113.00672773h.5v3h-.5l-.11662113.0067277c-.42082504.0488782-.76196299.3590206-.85696816.7639815l-.01968298.1126697-.00672773.1166211.00672773.1166211c.04887817.4208251.35902055.761963.76398144.8569682l.1126697.019683.11662113.0067277h3l.1166211-.0067277c.4973387-.0577651.8833789-.4804365.8833789-.9932723 0-.4733869-.3289337-.8699473-.7707092-.9735893l-.1126697-.019683-.1166211-.0067277h-.5v-4l-.00672773-.11662113c-.04887817-.42082504-.35902055-.76196299-.76398144-.85696816l-.1126697-.01968298zm0-3.25c-.69035594 0-1.25.55964406-1.25 1.25s.55964406 1.25 1.25 1.25 1.25-.55964406 1.25-1.25-.55964406-1.25-1.25-1.25z" fill-rule="evenodd"/></symbol><symbol id="icon-institution" viewBox="0 0 18 18"><path d="m7 16.9998189v-2.0003623h4v2.0003623h2v-3.0005434h-8v3.0005434zm-3-10.00181122h-1.52632364c-.27614237 0-.5-.22389817-.5-.50009056 0-.13995446.05863589-.27350497.16166338-.36820841l1.23156713-1.13206327h-2.36690687v12.00217346h3v-2.0003623h-3v-1.0001811h3v-1.0001811h1v-4.00072448h-1zm10 0v2.00036224h-1v4.00072448h1v1.0001811h3v1.0001811h-3v2.0003623h3v-12.00217346h-2.3695309l1.2315671 1.13206327c.2033191.186892.2166633.50325042.0298051.70660631-.0946863.10304615-.2282126.16169266-.3681417.16169266zm3-3.00054336c.5522847 0 1 .44779634 1 1.00018112v13.00235456h-18v-13.00235456c0-.55238478.44771525-1.00018112 1-1.00018112h3.45499992l4.20535144-3.86558216c.19129876-.17584288.48537447-.17584288.67667324 0l4.2053514 3.86558216zm-4 3.00054336h-8v1.00018112h8zm-2 6.00108672h1v-4.00072448h-1zm-1 0v-4.00072448h-2v4.00072448zm-3 0v-4.00072448h-1v4.00072448zm8-4.00072448c.5522847 0 1 .44779634 1 1.00018112v2.00036226h-2v-2.00036226c0-.55238478.4477153-1.00018112 1-1.00018112zm-12 0c.55228475 0 1 .44779634 1 1.00018112v2.00036226h-2v-2.00036226c0-.55238478.44771525-1.00018112 1-1.00018112zm5.99868798-7.81907007-5.24205601 4.81852671h10.48411203zm.00131202 3.81834559c-.55228475 0-1-.44779634-1-1.00018112s.44771525-1.00018112 1-1.00018112 1 .44779634 1 1.00018112-.44771525 1.00018112-1 1.00018112zm-1 11.00199236v1.0001811h2v-1.0001811z" fill-rule="evenodd"/></symbol><symbol id="icon-location" viewBox="0 0 18 18"><path d="m9.39521328 16.2688008c.79596342-.7770119 1.59208152-1.6299956 2.33285652-2.5295081 1.4020032-1.7024324 2.4323601-3.3624519 2.9354918-4.871847.2228715-.66861448.3364384-1.29323246.3364384-1.8674457 0-3.3137085-2.6862915-6-6-6-3.36356866 0-6 2.60156856-6 6 0 .57421324.11356691 1.19883122.3364384 1.8674457.50313169 1.5093951 1.53348863 3.1694146 2.93549184 4.871847.74077492.8995125 1.53689309 1.7524962 2.33285648 
2.5295081.13694479.1336842.26895677.2602648.39521328.3793207.12625651-.1190559.25826849-.2456365.39521328-.3793207zm-.39521328 1.7311992s-7-6-7-11c0-4 3.13400675-7 7-7 3.8659932 0 7 3.13400675 7 7 0 5-7 11-7 11zm0-8c-1.65685425 0-3-1.34314575-3-3s1.34314575-3 3-3c1.6568542 0 3 1.34314575 3 3s-1.3431458 3-3 3zm0-1c1.1045695 0 2-.8954305 2-2s-.8954305-2-2-2-2 .8954305-2 2 .8954305 2 2 2z" fill-rule="evenodd"/></symbol><symbol id="icon-minus" viewBox="0 0 16 16"><path d="m2.00087166 7h11.99825664c.5527662 0 1.0008717.44386482 1.0008717 1 0 .55228475-.4446309 1-1.0008717 1h-11.99825664c-.55276616 0-1.00087166-.44386482-1.00087166-1 0-.55228475.44463086-1 1.00087166-1z" fill-rule="evenodd"/></symbol><symbol id="icon-newsletter" viewBox="0 0 18 18"><path d="m9 11.8482489 2-1.1428571v-1.7053918h-4v1.7053918zm-3-1.7142857v-2.1339632h6v2.1339632l3-1.71428574v-6.41967746h-12v6.41967746zm10-5.3839632 1.5299989.95624934c.2923814.18273835.4700011.50320827.4700011.8479983v8.44575236c0 1.1045695-.8954305 2-2 2h-14c-1.1045695 0-2-.8954305-2-2v-8.44575236c0-.34479003.1776197-.66525995.47000106-.8479983l1.52999894-.95624934v-2.75c0-.55228475.44771525-1 1-1h12c.5522847 0 1 .44771525 1 1zm0 1.17924764v3.07075236l-7 4-7-4v-3.07075236l-1 .625v8.44575236c0 .5522847.44771525 1 1 1h14c.5522847 0 1-.4477153 1-1v-8.44575236zm-10-1.92924764h6v1h-6zm-1 2h8v1h-8z" fill-rule="evenodd"/></symbol><symbol id="icon-orcid" viewBox="0 0 18 18"><path d="m9 1c4.418278 0 8 3.581722 8 8s-3.581722 8-8 8-8-3.581722-8-8 3.581722-8 8-8zm-2.90107518 5.2732337h-1.41865256v7.1712107h1.41865256zm4.55867178.02508949h-2.99247027v7.14612121h2.91062487c.7673039 0 1.4476365-.1483432 2.0410182-.445034s1.0511995-.7152915 1.3734671-1.2558144c.3222677-.540523.4833991-1.1603247.4833991-1.85942385 0-.68545815-.1602789-1.30270225-.4808414-1.85175082-.3205625-.54904856-.7707074-.97532211-1.3504481-1.27883343-.5797408-.30351132-1.2413173-.45526471-1.9847495-.45526471zm-.1892674 1.07933542c.7877654 0 1.4143875.22336734 1.8798852.67010873.4654977.44674138.698243 1.05546001.698243 1.82617415 0 .74343221-.2310402 1.34447791-.6931277 1.80315511-.4620874.4586773-1.0750688.6880124-1.8389625.6880124h-1.46810075v-4.98745039zm-5.08652545-3.71099194c-.21825533 0-.410525.08444276-.57681478.25333081-.16628977.16888806-.24943341.36245684-.24943341.58071218 0 .22345188.08314364.41961891.24943341.58850696.16628978.16888806.35855945.25333082.57681478.25333082.233845 0 .43390938-.08314364.60019916-.24943342.16628978-.16628977.24943342-.36375592.24943342-.59240436 0-.233845-.08314364-.43131115-.24943342-.59240437s-.36635416-.24163862-.60019916-.24163862z" fill-rule="evenodd"/></symbol><symbol id="icon-plus" viewBox="0 0 16 16"><path d="m2.00087166 7h4.99912834v-4.99912834c0-.55276616.44386482-1.00087166 1-1.00087166.55228475 0 1 .44463086 1 1.00087166v4.99912834h4.9991283c.5527662 0 1.0008717.44386482 1.0008717 1 0 .55228475-.4446309 1-1.0008717 1h-4.9991283v4.9991283c0 .5527662-.44386482 1.0008717-1 1.0008717-.55228475 0-1-.4446309-1-1.0008717v-4.9991283h-4.99912834c-.55276616 0-1.00087166-.44386482-1.00087166-1 0-.55228475.44463086-1 1.00087166-1z" fill-rule="evenodd"/></symbol><symbol id="icon-print" viewBox="0 0 18 18"><path d="m16.0049107 5h-14.00982141c-.54941618 0-.99508929.4467783-.99508929.99961498v6.00077002c0 .5570958.44271433.999615.99508929.999615h1.00491071v-3h12v3h1.0049107c.5494162 0 
.9950893-.4467783.9950893-.999615v-6.00077002c0-.55709576-.4427143-.99961498-.9950893-.99961498zm-2.0049107-1v-2.00208688c0-.54777062-.4519464-.99791312-1.0085302-.99791312h-7.9829396c-.55661731 0-1.0085302.44910695-1.0085302.99791312v2.00208688zm1 10v2.0018986c0 1.103521-.9019504 1.9981014-2.0085302 1.9981014h-7.9829396c-1.1092806 0-2.0085302-.8867064-2.0085302-1.9981014v-2.0018986h-1.00491071c-1.10185739 0-1.99508929-.8874333-1.99508929-1.999615v-6.00077002c0-1.10435686.8926228-1.99961498 1.99508929-1.99961498h1.00491071v-2.00208688c0-1.10341695.90195036-1.99791312 2.0085302-1.99791312h7.9829396c1.1092806 0 2.0085302.89826062 2.0085302 1.99791312v2.00208688h1.0049107c1.1018574 0 1.9950893.88743329 1.9950893 1.99961498v6.00077002c0 1.1043569-.8926228 1.999615-1.9950893 1.999615zm-1-3h-10v5.0018986c0 .5546075.44702548.9981014 1.0085302.9981014h7.9829396c.5565964 0 1.0085302-.4491701 1.0085302-.9981014zm-9 1h8v1h-8zm0 2h5v1h-5zm9-5c-.5522847 0-1-.44771525-1-1s.4477153-1 1-1 1 .44771525 1 1-.4477153 1-1 1z" fill-rule="evenodd"/></symbol><symbol id="icon-search" viewBox="0 0 22 22"><path d="M21.697 20.261a1.028 1.028 0 01.01 1.448 1.034 1.034 0 01-1.448-.01l-4.267-4.267A9.812 9.811 0 010 9.812a9.812 9.811 0 1117.43 6.182zM9.812 18.222A8.41 8.41 0 109.81 1.403a8.41 8.41 0 000 16.82z" fill-rule="evenodd"/></symbol><symbol id="icon-social-facebook" viewBox="0 0 24 24"><path d="m6.00368507 20c-1.10660471 0-2.00368507-.8945138-2.00368507-1.9940603v-12.01187942c0-1.10128908.89451376-1.99406028 1.99406028-1.99406028h12.01187942c1.1012891 0 1.9940603.89451376 1.9940603 1.99406028v12.01187942c0 1.1012891-.88679 1.9940603-2.0032184 1.9940603h-2.9570132v-6.1960818h2.0797387l.3114113-2.414723h-2.39115v-1.54164807c0-.69911803.1941355-1.1755439 1.1966615-1.1755439l1.2786739-.00055875v-2.15974763l-.2339477-.02492088c-.3441234-.03134957-.9500153-.07025255-1.6293054-.07025255-1.8435726 0-3.1057323 1.12531866-3.1057323 3.19187953v1.78079225h-2.0850778v2.414723h2.0850778v6.1960818z" fill-rule="evenodd"/></symbol><symbol id="icon-social-twitter" viewBox="0 0 24 24"><path d="m18.8767135 6.87445248c.7638174-.46908424 1.351611-1.21167363 1.6250764-2.09636345-.7135248.43394112-1.50406.74870123-2.3464594.91677702-.6695189-.73342162-1.6297913-1.19486605-2.6922204-1.19486605-2.0399895 0-3.6933555 1.69603749-3.6933555 3.78628909 0 .29642457.0314329.58673729.0942985.8617704-3.06469922-.15890802-5.78835241-1.66547825-7.60988389-3.9574208-.3174714.56076194-.49978171 1.21167363-.49978171 1.90536824 0 1.31404706.65223085 2.47224203 1.64236444 3.15218497-.60350999-.0198635-1.17401554-.1925232-1.67222562-.47366811v.04583885c0 1.83355406 1.27302891 3.36609966 2.96411421 3.71294696-.31118484.0886217-.63651445.1329326-.97441718.1329326-.2357461 0-.47149219-.0229194-.69466516-.0672303.47149219 1.5065703 1.83253297 2.6036468 3.44975116 2.632678-1.2651707 1.0160946-2.85724264 1.6196394-4.5891906 1.6196394-.29861172 0-.59093688-.0152796-.88011875-.0504227 1.63450624 1.0726291 3.57548241 1.6990934 5.66104951 1.6990934 6.79263079 0 10.50641749-5.7711113 10.50641749-10.7751859l-.0094298-.48894775c.7229547-.53478659 1.3516109-1.20250585 1.8419628-1.96190282-.6632323.30100846-1.3751855.50422736-2.1217148.59590507z" fill-rule="evenodd"/></symbol><symbol id="icon-social-youtube" viewBox="0 0 24 24"><path d="m10.1415 14.3973208-.0005625-5.19318431 4.863375 2.60554491zm9.963-7.92753362c-.6845625-.73643756-1.4518125-.73990314-1.803375-.7826454-2.518875-.18714178-6.2971875-.18714178-6.2971875-.18714178-.007875 0-3.7861875 
0-6.3050625.18714178-.352125.04274226-1.1188125.04620784-1.8039375.7826454-.5394375.56084773-.7149375 1.8344515-.7149375 1.8344515s-.18 1.49597903-.18 2.99138042v1.4024082c0 1.495979.18 2.9913804.18 2.9913804s.1755 1.2736038.7149375 1.8344515c.685125.7364376 1.5845625.7133337 1.9850625.7901542 1.44.1420891 6.12.1859866 6.12.1859866s3.78225-.005776 6.301125-.1929178c.3515625-.0433198 1.1188125-.0467854 1.803375-.783223.5394375-.5608477.7155-1.8344515.7155-1.8344515s.18-1.4954014.18-2.9913804v-1.4024082c0-1.49540139-.18-2.99138042-.18-2.99138042s-.1760625-1.27360377-.7155-1.8344515z" fill-rule="evenodd"/></symbol><symbol id="icon-subject-medicine" viewBox="0 0 18 18"><path d="m12.5 8h-6.5c-1.65685425 0-3 1.34314575-3 3v1c0 1.6568542 1.34314575 3 3 3h1v-2h-.5c-.82842712 0-1.5-.6715729-1.5-1.5s.67157288-1.5 1.5-1.5h1.5 2 1 2c1.6568542 0 3-1.34314575 3-3v-1c0-1.65685425-1.3431458-3-3-3h-2v2h1.5c.8284271 0 1.5.67157288 1.5 1.5s-.6715729 1.5-1.5 1.5zm-5.5-1v-1h-3.5c-1.38071187 0-2.5-1.11928813-2.5-2.5s1.11928813-2.5 2.5-2.5h1.02786405c.46573528 0 .92507448.10843528 1.34164078.31671843l1.13382424.56691212c.06026365-1.05041141.93116291-1.88363055 1.99667093-1.88363055 1.1045695 0 2 .8954305 2 2h2c2.209139 0 4 1.790861 4 4v1c0 2.209139-1.790861 4-4 4h-2v1h2c1.1045695 0 2 .8954305 2 2s-.8954305 2-2 2h-2c0 1.1045695-.8954305 2-2 2s-2-.8954305-2-2h-1c-2.209139 0-4-1.790861-4-4v-1c0-2.209139 1.790861-4 4-4zm0-2v-2.05652691c-.14564246-.03538148-.28733393-.08714006-.42229124-.15461871l-1.15541752-.57770876c-.27771087-.13885544-.583937-.21114562-.89442719-.21114562h-1.02786405c-.82842712 0-1.5.67157288-1.5 1.5s.67157288 1.5 1.5 1.5zm4 1v1h1.5c.2761424 0 .5-.22385763.5-.5s-.2238576-.5-.5-.5zm-1 1v-5c0-.55228475-.44771525-1-1-1s-1 .44771525-1 1v5zm-2 4v5c0 .5522847.44771525 1 1 1s1-.4477153 1-1v-5zm3 2v2h2c.5522847 0 1-.4477153 1-1s-.4477153-1-1-1zm-4-1v-1h-.5c-.27614237 0-.5.2238576-.5.5s.22385763.5.5.5zm-3.5-9h1c.27614237 0 .5.22385763.5.5s-.22385763.5-.5.5h-1c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5z" fill-rule="evenodd"/></symbol><symbol id="icon-success" viewBox="0 0 18 18"><path d="m9 0c4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9zm3.4860198 4.98163161-4.71802968 5.50657859-2.62834168-2.02300024c-.42862421-.36730544-1.06564993-.30775346-1.42283677.13301307-.35718685.44076653-.29927542 1.0958383.12934879 1.46314377l3.40735508 2.7323063c.42215801.3385221 1.03700951.2798252 1.38749189-.1324571l5.38450527-6.33394549c.3613513-.43716226.3096573-1.09278382-.115462-1.46437175-.4251192-.37158792-1.0626796-.31842941-1.4240309.11873285z" fill-rule="evenodd"/></symbol><symbol id="icon-table" viewBox="0 0 18 18"><path d="m16.0049107 2c1.1018574 0 1.9950893.89706013 1.9950893 2.00585866v9.98828264c0 1.1078052-.8926228 2.0058587-1.9950893 2.0058587l-4.0059107-.001.001.001h-1l-.001-.001h-5l.001.001h-1l-.001-.001-3.00391071.001c-1.10185739 0-1.99508929-.8970601-1.99508929-2.0058587v-9.98828264c0-1.10780515.8926228-2.00585866 1.99508929-2.00585866zm-11.0059107 5h-3.999v6.9941413c0 .5572961.44630695 1.0058587.99508929 1.0058587h3.00391071zm6 0h-5v8h5zm5.0059107-4h-4.0059107v3h5.001v1h-5.001v7.999l4.0059107.001c.5487152 0 .9950893-.4488783.9950893-1.0058587v-9.98828264c0-.55729607-.446307-1.00585866-.9950893-1.00585866zm-12.5049107 9c.27614237 0 .5.2238576.5.5s-.22385763.5-.5.5h-1c-.27614237 0-.5-.2238576-.5-.5s.22385763-.5.5-.5zm12 0c.2761424 0 .5.2238576.5.5s-.2238576.5-.5.5h-2c-.2761424 0-.5-.2238576-.5-.5s.2238576-.5.5-.5zm-6 0c.27614237 0 
.5.2238576.5.5s-.22385763.5-.5.5h-2c-.27614237 0-.5-.2238576-.5-.5s.22385763-.5.5-.5zm-6-2c.27614237 0 .5.2238576.5.5s-.22385763.5-.5.5h-1c-.27614237 0-.5-.2238576-.5-.5s.22385763-.5.5-.5zm12 0c.2761424 0 .5.2238576.5.5s-.2238576.5-.5.5h-2c-.2761424 0-.5-.2238576-.5-.5s.2238576-.5.5-.5zm-6 0c.27614237 0 .5.2238576.5.5s-.22385763.5-.5.5h-2c-.27614237 0-.5-.2238576-.5-.5s.22385763-.5.5-.5zm-6-2c.27614237 0 .5.22385763.5.5s-.22385763.5-.5.5h-1c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5zm12 0c.2761424 0 .5.22385763.5.5s-.2238576.5-.5.5h-2c-.2761424 0-.5-.22385763-.5-.5s.2238576-.5.5-.5zm-6 0c.27614237 0 .5.22385763.5.5s-.22385763.5-.5.5h-2c-.27614237 0-.5-.22385763-.5-.5s.22385763-.5.5-.5zm1.499-5h-5v3h5zm-6 0h-3.00391071c-.54871518 0-.99508929.44887827-.99508929 1.00585866v1.99414134h3.999z" fill-rule="evenodd"/></symbol><symbol id="icon-tick-circle" viewBox="0 0 24 24"><path d="m12 2c5.5228475 0 10 4.4771525 10 10s-4.4771525 10-10 10-10-4.4771525-10-10 4.4771525-10 10-10zm0 1c-4.97056275 0-9 4.02943725-9 9 0 4.9705627 4.02943725 9 9 9 4.9705627 0 9-4.0294373 9-9 0-4.97056275-4.0294373-9-9-9zm4.2199868 5.36606669c.3613514-.43716226.9989118-.49032077 1.424031-.11873285s.4768133 1.02720949.115462 1.46437175l-6.093335 6.94397871c-.3622945.4128716-.9897871.4562317-1.4054264.0971157l-3.89719065-3.3672071c-.42862421-.3673054-.48653564-1.0223772-.1293488-1.4631437s.99421256-.5003185 1.42283677-.1330131l3.11097438 2.6987741z" fill-rule="evenodd"/></symbol><symbol id="icon-tick" viewBox="0 0 16 16"><path d="m6.76799012 9.21106946-3.1109744-2.58349728c-.42862421-.35161617-1.06564993-.29460792-1.42283677.12733148s-.29927541 1.04903009.1293488 1.40064626l3.91576307 3.23873978c.41034319.3393961 1.01467563.2976897 1.37450571-.0948578l6.10568327-6.660841c.3613513-.41848908.3096572-1.04610608-.115462-1.4018218-.4251192-.35571573-1.0626796-.30482786-1.424031.11366122z" fill-rule="evenodd"/></symbol><symbol id="icon-update" viewBox="0 0 18 18"><path d="m1 13v1c0 .5522847.44771525 1 1 1h14c.5522847 0 1-.4477153 1-1v-1h-1v-10h-14v10zm16-1h1v2c0 1.1045695-.8954305 2-2 2h-14c-1.1045695 0-2-.8954305-2-2v-2h1v-9c0-.55228475.44771525-1 1-1h14c.5522847 0 1 .44771525 1 1zm-1 0v1h-4.5857864l-1 1h-2.82842716l-1-1h-4.58578644v-1h5l1 1h2l1-1zm-13-8h12v7h-12zm1 1v5h10v-5zm1 1h4v1h-4zm0 2h4v1h-4z" fill-rule="evenodd"/></symbol><symbol id="icon-upload" viewBox="0 0 18 18"><path d="m10.0046024 0c.5497429 0 1.3179837.32258606 1.707238.71184039l4.5763192 4.57631922c.3931386.39313859.7118404 1.16760135.7118404 1.71431368v8.98899651c0 1.1092806-.8945138 2.0085302-1.9940603 2.0085302h-12.01187942c-1.10128908 0-1.99406028-.8926228-1.99406028-1.9950893v-14.00982141c0-1.10185739.88743329-1.99508929 1.99961498-1.99508929zm0 1h-7.00498742c-.55709576 0-.99961498.44271433-.99961498.99508929v14.00982141c0 .5500396.44491393.9950893.99406028.9950893h12.01187942c.5463747 0 .9940603-.4506622.9940603-1.0085302v-8.98899651c0-.28393444-.2150684-.80332809-.4189472-1.0072069l-4.5763192-4.57631922c-.2038461-.20384606-.718603-.41894717-1.0001312-.41894717zm-1.85576936 4.14572769c.19483374-.19483375.51177826-.19377714.70556874.00001334l2.59099082 2.59099079c.1948411.19484112.1904373.51514474.0027906.70279143-.1932998.19329987-.5046517.19237083-.7001856-.00692852l-1.74638687-1.7800176v6.14827687c0 .2717771-.23193359.492096-.5.492096-.27614237 0-.5-.216372-.5-.492096v-6.14827641l-1.74627892 1.77990922c-.1933927.1971171-.51252214.19455839-.70016883.0069117-.19329987-.19329988-.19100584-.50899493.00277731-.70277808z" 
fill-rule="evenodd"/></symbol><symbol id="icon-video" viewBox="0 0 18 18"><path d="m16.0049107 2c1.1018574 0 1.9950893.89706013 1.9950893 2.00585866v9.98828264c0 1.1078052-.8926228 2.0058587-1.9950893 2.0058587h-14.00982141c-1.10185739 0-1.99508929-.8970601-1.99508929-2.0058587v-9.98828264c0-1.10780515.8926228-2.00585866 1.99508929-2.00585866zm0 1h-14.00982141c-.54871518 0-.99508929.44887827-.99508929 1.00585866v9.98828264c0 .5572961.44630695 1.0058587.99508929 1.0058587h14.00982141c.5487152 0 .9950893-.4488783.9950893-1.0058587v-9.98828264c0-.55729607-.446307-1.00585866-.9950893-1.00585866zm-8.30912922 2.24944486 4.60460462 2.73982242c.9365543.55726659.9290753 1.46522435 0 2.01804082l-4.60460462 2.7398224c-.93655425.5572666-1.69578148.1645632-1.69578148-.8937585v-5.71016863c0-1.05087579.76670616-1.446575 1.69578148-.89375851zm-.67492769.96085624v5.5750128c0 .2995102-.10753745.2442517.16578928.0847713l4.58452283-2.67497259c.3050619-.17799716.3051624-.21655446 0-.39461026l-4.58452283-2.67497264c-.26630747-.15538481-.16578928-.20699944-.16578928.08477139z" fill-rule="evenodd"/></symbol><symbol id="icon-warning" viewBox="0 0 18 18"><path d="m9 11.75c.69035594 0 1.25.5596441 1.25 1.25s-.55964406 1.25-1.25 1.25-1.25-.5596441-1.25-1.25.55964406-1.25 1.25-1.25zm.41320045-7.75c.55228475 0 1.00000005.44771525 1.00000005 1l-.0034543.08304548-.3333333 4c-.043191.51829212-.47645714.91695452-.99654578.91695452h-.15973424c-.52008864 0-.95335475-.3986624-.99654576-.91695452l-.33333333-4c-.04586475-.55037702.36312325-1.03372649.91350028-1.07959124l.04148683-.00259031zm-.41320045 14c-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9 4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9z" fill-rule="evenodd"/></symbol><symbol id="icon-checklist-banner" viewBox="0 0 56.69 56.69"><path style="fill:none" d="M0 0h56.69v56.69H0z"/><clipPath id="b"><use xlink:href="#a" style="overflow:visible"/></clipPath><path d="M21.14 34.46c0-6.77 5.48-12.26 12.24-12.26s12.24 5.49 12.24 12.26-5.48 12.26-12.24 12.26c-6.76-.01-12.24-5.49-12.24-12.26zm19.33 10.66 10.23 9.22s1.21 1.09 2.3-.12l2.09-2.32s1.09-1.21-.12-2.3l-10.23-9.22m-19.29-5.92c0-4.38 3.55-7.94 7.93-7.94s7.93 3.55 7.93 7.94c0 4.38-3.55 7.94-7.93 7.94-4.38-.01-7.93-3.56-7.93-7.94zm17.58 12.99 4.14-4.81" style="clip-path:url(#b);fill:none;stroke:#01324b;stroke-width:2;stroke-linecap:round"/><path d="M8.26 9.75H28.6M8.26 15.98H28.6m-20.34 6.2h12.5m14.42-5.2V4.86s0-2.93-2.93-2.93H4.13s-2.93 0-2.93 2.93v37.57s0 2.93 2.93 2.93h15.01M8.26 9.75H28.6M8.26 15.98H28.6m-20.34 6.2h12.5" style="clip-path:url(#b);fill:none;stroke:#01324b;stroke-width:2;stroke-linecap:round;stroke-linejoin:round"/></symbol><symbol id="icon-chevron-down" viewBox="0 0 16 16"><path d="m5.58578644 3-3.29289322-3.29289322c-.39052429-.39052429-.39052429-1.02368927 0-1.41421356s1.02368927-.39052429 1.41421356 0l4 4c.39052429.39052429.39052429 1.02368927 0 1.41421356l-4 4c-.39052429.39052429-1.02368927.39052429-1.41421356 0s-.39052429-1.02368927 0-1.41421356z" fill-rule="evenodd" transform="matrix(0 1 -1 0 11 1)"/></symbol><symbol id="icon-eds-i-arrow-right-medium" viewBox="0 0 24 24"><path d="m12.728 3.293 7.98 7.99a.996.996 0 0 1 .281.561l.011.157c0 .32-.15.605-.384.788l-7.908 7.918a1 1 0 0 1-1.416-1.414L17.576 13H4a1 1 0 0 1 0-2h13.598l-6.285-6.293a1 1 0 0 1-.082-1.32l.083-.095a1 1 0 0 1 1.414.001Z"/></symbol><symbol id="icon-eds-i-chevron-down-medium" viewBox="0 0 16 16"><path d="m2.00087166 7h4.99912834v-4.99912834c0-.55276616.44386482-1.00087166 1-1.00087166.55228475 0 1 .44463086 1 
1.00087166v4.99912834h4.9991283c.5527662 0 1.0008717.44386482 1.0008717 1 0 .55228475-.4446309 1-1.0008717 1h-4.9991283v4.9991283c0 .5527662-.44386482 1.0008717-1 1.0008717-.55228475 0-1-.4446309-1-1.0008717v-4.9991283h-4.99912834c-.55276616 0-1.00087166-.44386482-1.00087166-1 0-.55228475.44463086-1 1.00087166-1z" fill-rule="evenodd"/></symbol><symbol id="icon-eds-i-chevron-down-small" viewBox="0 0 16 16"><path d="M13.692 5.278a1 1 0 0 1 .03 1.414L9.103 11.51a1.491 1.491 0 0 1-2.188.019L2.278 6.692a1 1 0 0 1 1.444-1.384L8 9.771l4.278-4.463a1 1 0 0 1 1.318-.111l.096.081Z"/></symbol><symbol id="icon-eds-i-chevron-right-medium" viewBox="0 0 10 10"><path d="m5.96738168 4.70639573 2.39518594-2.41447274c.37913917-.38219212.98637524-.38972225 1.35419292-.01894278.37750606.38054586.37784436.99719163-.00013556 1.37821513l-4.03074001 4.06319683c-.37758093.38062133-.98937525.38100976-1.367372-.00003075l-4.03091981-4.06337806c-.37759778-.38063832-.38381821-.99150444-.01600053-1.3622839.37750607-.38054587.98772445-.38240057 1.37006824.00302197l2.39538588 2.4146743.96295325.98624457z" fill-rule="evenodd" transform="matrix(0 -1 1 0 0 10)"/></symbol><symbol id="icon-eds-i-chevron-right-small" viewBox="0 0 10 10"><path d="m5.96738168 4.70639573 2.39518594-2.41447274c.37913917-.38219212.98637524-.38972225 1.35419292-.01894278.37750606.38054586.37784436.99719163-.00013556 1.37821513l-4.03074001 4.06319683c-.37758093.38062133-.98937525.38100976-1.367372-.00003075l-4.03091981-4.06337806c-.37759778-.38063832-.38381821-.99150444-.01600053-1.3622839.37750607-.38054587.98772445-.38240057 1.37006824.00302197l2.39538588 2.4146743.96295325.98624457z" fill-rule="evenodd" transform="matrix(0 -1 1 0 0 10)"/></symbol><symbol id="icon-eds-i-chevron-up-medium" viewBox="0 0 16 16"><path d="m2.00087166 7h11.99825664c.5527662 0 1.0008717.44386482 1.0008717 1 0 .55228475-.4446309 1-1.0008717 1h-11.99825664c-.55276616 0-1.00087166-.44386482-1.00087166-1 0-.55228475.44463086-1 1.00087166-1z" fill-rule="evenodd"/></symbol><symbol id="icon-eds-i-close-medium" viewBox="0 0 16 16"><path d="m2.29679575 12.2772478c-.39658757.3965876-.39438847 1.0328109-.00062148 1.4265779.39651227.3965123 1.03246768.3934888 1.42657791-.0006214l4.27724782-4.27724787 4.2772478 4.27724787c.3965876.3965875 1.0328109.3943884 1.4265779.0006214.3965123-.3965122.3934888-1.0324677-.0006214-1.4265779l-4.27724787-4.2772478 4.27724787-4.27724782c.3965875-.39658757.3943884-1.03281091.0006214-1.42657791-.3965122-.39651226-1.0324677-.39348875-1.4265779.00062148l-4.2772478 4.27724782-4.27724782-4.27724782c-.39658757-.39658757-1.03281091-.39438847-1.42657791-.00062148-.39651226.39651227-.39348875 1.03246768.00062148 1.42657791l4.27724782 4.27724782z" fill-rule="evenodd"/></symbol><symbol id="icon-eds-i-download-medium" viewBox="0 0 16 16"><path d="m12.9975267 12.999368c.5467123 0 1.0024733.4478567 1.0024733 1.000316 0 .5563109-.4488226 1.000316-1.0024733 1.000316h-9.99505341c-.54671233 0-1.00247329-.4478567-1.00247329-1.000316 0-.5563109.44882258-1.000316 1.00247329-1.000316zm-4.9975267-11.999368c.55228475 0 1 .44497754 1 .99589209v6.80214418l2.4816273-2.48241149c.3928222-.39294628 1.0219732-.4006883 1.4030652-.01947579.3911302.39125371.3914806 1.02525073-.0001404 1.41699553l-4.17620792 4.17752758c-.39120769.3913313-1.02508144.3917306-1.41671995-.0000316l-4.17639421-4.17771394c-.39122513-.39134876-.39767006-1.01940351-.01657797-1.40061601.39113012-.39125372 1.02337105-.3931606 1.41951349.00310701l2.48183446 2.48261871v-6.80214418c0-.55001601.44386482-.99589209 
1-.99589209z" fill-rule="evenodd"/></symbol><symbol id="icon-eds-i-info-filled-medium" viewBox="0 0 18 18"><path d="m9 0c4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9zm0 7h-1.5l-.11662113.00672773c-.49733868.05776511-.88337887.48043643-.88337887.99327227 0 .47338693.32893365.86994729.77070917.97358929l.1126697.01968298.11662113.00672773h.5v3h-.5l-.11662113.0067277c-.42082504.0488782-.76196299.3590206-.85696816.7639815l-.01968298.1126697-.00672773.1166211.00672773.1166211c.04887817.4208251.35902055.761963.76398144.8569682l.1126697.019683.11662113.0067277h3l.1166211-.0067277c.4973387-.0577651.8833789-.4804365.8833789-.9932723 0-.4733869-.3289337-.8699473-.7707092-.9735893l-.1126697-.019683-.1166211-.0067277h-.5v-4l-.00672773-.11662113c-.04887817-.42082504-.35902055-.76196299-.76398144-.85696816l-.1126697-.01968298zm0-3.25c-.69035594 0-1.25.55964406-1.25 1.25s.55964406 1.25 1.25 1.25 1.25-.55964406 1.25-1.25-.55964406-1.25-1.25-1.25z" fill-rule="evenodd"/></symbol><symbol id="icon-eds-i-mail-medium" viewBox="0 0 24 24"><path d="m19.462 0c1.413 0 2.538 1.184 2.538 2.619v12.762c0 1.435-1.125 2.619-2.538 2.619h-16.924c-1.413 0-2.538-1.184-2.538-2.619v-12.762c0-1.435 1.125-2.619 2.538-2.619zm.538 5.158-7.378 6.258a2.549 2.549 0 0 1 -3.253-.008l-7.369-6.248v10.222c0 .353.253.619.538.619h16.924c.285 0 .538-.266.538-.619zm-.538-3.158h-16.924c-.264 0-.5.228-.534.542l8.65 7.334c.2.165.492.165.684.007l8.656-7.342-.001-.025c-.044-.3-.274-.516-.531-.516z"/></symbol><symbol id="icon-eds-i-menu-medium" viewBox="0 0 24 24"><path d="M21 4a1 1 0 0 1 0 2H3a1 1 0 1 1 0-2h18Zm-4 7a1 1 0 0 1 0 2H3a1 1 0 0 1 0-2h14Zm4 7a1 1 0 0 1 0 2H3a1 1 0 0 1 0-2h18Z"/></symbol><symbol id="icon-eds-i-search-medium" viewBox="0 0 24 24"><path d="M11 1c5.523 0 10 4.477 10 10 0 2.4-.846 4.604-2.256 6.328l3.963 3.965a1 1 0 0 1-1.414 1.414l-3.965-3.963A9.959 9.959 0 0 1 11 21C5.477 21 1 16.523 1 11S5.477 1 11 1Zm0 2a8 8 0 1 0 0 16 8 8 0 0 0 0-16Z"/></symbol><symbol id="icon-eds-i-user-single-medium" viewBox="0 0 24 24"><path d="M12 1a5 5 0 1 1 0 10 5 5 0 0 1 0-10Zm0 2a3 3 0 1 0 0 6 3 3 0 0 0 0-6Zm-.406 9.008a8.965 8.965 0 0 1 6.596 2.494A9.161 9.161 0 0 1 21 21.025V22a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1v-.985c.05-4.825 3.815-8.777 8.594-9.007Zm.39 1.992-.299.006c-3.63.175-6.518 3.127-6.678 6.775L5 21h13.998l-.009-.268a7.157 7.157 0 0 0-1.97-4.573l-.214-.213A6.967 6.967 0 0 0 11.984 14Z"/></symbol><symbol id="icon-eds-i-warning-filled-medium" viewBox="0 0 18 18"><path d="m9 11.75c.69035594 0 1.25.5596441 1.25 1.25s-.55964406 1.25-1.25 1.25-1.25-.5596441-1.25-1.25.55964406-1.25 1.25-1.25zm.41320045-7.75c.55228475 0 1.00000005.44771525 1.00000005 1l-.0034543.08304548-.3333333 4c-.043191.51829212-.47645714.91695452-.99654578.91695452h-.15973424c-.52008864 0-.95335475-.3986624-.99654576-.91695452l-.33333333-4c-.04586475-.55037702.36312325-1.03372649.91350028-1.07959124l.04148683-.00259031zm-.41320045 14c-4.97056275 0-9-4.0294373-9-9 0-4.97056275 4.02943725-9 9-9 4.9705627 0 9 4.02943725 9 9 0 4.9705627-4.0294373 9-9 9z" fill-rule="evenodd"/></symbol><symbol id="icon-expand-image" viewBox="0 0 18 18"><path d="m7.49754099 11.9178212c.38955542-.3895554.38761957-1.0207846-.00290473-1.4113089-.39324695-.3932469-1.02238878-.3918247-1.41130883-.0029047l-4.10273549 4.1027355.00055454-3.5103985c.00008852-.5603185-.44832171-1.006032-1.00155062-1.0059446-.53903074.0000852-.97857527.4487442-.97866268 1.0021075l-.00093318 5.9072465c-.00008751.553948.44841131 1.001882 1.00174994 
1.0017946l5.906983-.0009331c.5539233-.0000875 1.00197907-.4486389 1.00206646-1.0018679.00008515-.5390307-.45026621-.9784332-1.00588841-.9783454l-3.51010549.0005545zm3.00571741-5.83449376c-.3895554.38955541-.3876196 1.02078454.0029047 1.41130883.393247.39324696 1.0223888.39182478 1.4113089.00290473l4.1027355-4.10273549-.0005546 3.5103985c-.0000885.56031852.4483217 1.006032 1.0015506 1.00594461.5390308-.00008516.9785753-.44874418.9786627-1.00210749l.0009332-5.9072465c.0000875-.553948-.4484113-1.00188204-1.0017499-1.00179463l-5.906983.00093313c-.5539233.00008751-1.0019791.44863892-1.0020665 1.00186784-.0000852.53903074.4502662.97843325 1.0058884.97834547l3.5101055-.00055449z" fill-rule="evenodd"/></symbol><symbol id="icon-github" viewBox="0 0 100 100"><path fill-rule="evenodd" clip-rule="evenodd" d="M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z"/></symbol><symbol id="icon-springer-arrow-left"><path d="M15 7a1 1 0 000-2H3.385l2.482-2.482a.994.994 0 00.02-1.403 1.001 1.001 0 00-1.417 0L.294 5.292a1.001 1.001 0 000 1.416l4.176 4.177a.991.991 0 001.4.016 1 1 0 00-.003-1.42L3.385 7H15z"/></symbol><symbol id="icon-springer-arrow-right"><path d="M1 7a1 1 0 010-2h11.615l-2.482-2.482a.994.994 0 01-.02-1.403 1.001 1.001 0 011.417 0l4.176 4.177a1.001 1.001 0 010 1.416l-4.176 4.177a.991.991 0 01-1.4.016 1 1 0 01.003-1.42L12.615 7H1z"/></symbol><symbol id="icon-submit-open" viewBox="0 0 16 17"><path d="M12 0c1.10457 0 2 .895431 2 2v5c0 .276142-.223858.5-.5.5S13 7.276142 13 7V2c0-.512836-.38604-.935507-.883379-.993272L12 1H6v3c0 1.10457-.89543 2-2 2H1v8c0 .512836.38604.935507.883379.993272L2 15h6.5c.276142 0 .5.223858.5.5s-.223858.5-.5.5H2c-1.104569 0-2-.89543-2-2V5.828427c0-.530433.210714-1.039141.585786-1.414213L4.414214.585786C4.789286.210714 5.297994 0 5.828427 0H12Zm3.41 11.14c.250899.250899.250274.659726 0 .91-.242954.242954-.649606.245216-.9-.01l-1.863671-1.900337.001043 5.869492c0 .356992-.289839.637138-.647372.637138-.347077 0-.647371-.285256-.647371-.637138l-.001043-5.869492L9.5 12.04c-.253166.258042-.649726.260274-.9.01-.242954-.242954-.252269-.657731 0-.91l2.942184-2.951303c.250908-.250909.66127-.252277.91353-.000017L15.41 11.14ZM5 1.413 1.413 5H4c.552285 0 1-.447715 1-1V1.413ZM11 3c.276142 0 .5.223858.5.5s-.223858.5-.5.5H7.5c-.276142 0-.5-.223858-.5-.5s.223858-.5.5-.5H11Zm0 2c.276142 0 .5.223858.5.5s-.223858.5-.5.5H7.5c-.276142 0-.5-.223858-.5-.5s.223858-.5.5-.5H11Z" fill-rule="nonzero"/></symbol></svg> </div> </footer> <div class="c-site-messages message u-hide u-hide-print c-site-messages--nature-briefing c-site-messages--nature-briefing-email-variant c-site-messages--nature-briefing-redesign-2020 sans-serif " data-component-id="nature-briefing-banner" data-component-expirydays="30" 
data-component-trigger-scroll-percentage="15" data-track="in-view" data-track-action="in-view" data-track-category="nature briefing" data-track-label="Briefing banner visible: Flagship"> <div class="c-site-messages__banner-large"> <div class="c-site-messages__close-container"> <button class="c-site-messages__close" data-track="click" data-track-category="nature briefing" data-track-label="Briefing banner dismiss: Flagship"> <svg width="25px" height="25px" focusable="false" aria-hidden="true" viewBox="0 0 25 25" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <title>Close banner</title> <defs></defs> <g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> <rect opacity="0" x="0" y="0" width="25" height="25"></rect> <path d="M6.29679575,16.2772478 C5.90020818,16.6738354 5.90240728,17.3100587 6.29617427,17.7038257 C6.69268654,18.100338 7.32864195,18.0973145 7.72275218,17.7032043 L12,13.4259564 L16.2772478,17.7032043 C16.6738354,18.0997918 17.3100587,18.0975927 17.7038257,17.7038257 C18.100338,17.3073135 18.0973145,16.671358 17.7032043,16.2772478 L13.4259564,12 L17.7032043,7.72275218 C18.0997918,7.32616461 18.0975927,6.68994127 17.7038257,6.29617427 C17.3073135,5.89966201 16.671358,5.90268552 16.2772478,6.29679575 L12,10.5740436 L7.72275218,6.29679575 C7.32616461,5.90020818 6.68994127,5.90240728 6.29617427,6.29617427 C5.89966201,6.69268654 5.90268552,7.32864195 6.29679575,7.72275218 L10.5740436,12 L6.29679575,16.2772478 Z" fill="#ffffff"></path> </g> </svg> <span class="visually-hidden">Close</span> </button> </div> <div class="c-site-messages__form-container"> <div class="grid grid-12 last"> <div class="grid grid-4"> <img alt="Nature Briefing" src="/static/images/logos/nature-briefing-logo-n150-white-d81c9da3ec.svg" width="250" height="40"> <p class="c-site-messages--nature-briefing__strapline extra-tight-line-height">Sign up for the <em>Nature Briefing</em> newsletter — what matters in science, free to your inbox daily.</p> </div> <div class="grid grid-8 last"> <form action="https://www.nature.com/briefing/briefing" method="post" data-location="banner" data-track="signup_nature_briefing_banner" data-track-action="transmit-form" data-track-category="nature briefing" data-track-label="Briefing banner submit: Flagship"> <input id="briefing-banner-signup-form-input-track-originReferralPoint" type="hidden" name="track_originReferralPoint" value="MainBriefingBanner"> <input id="briefing-banner-signup-form-input-track-formType" type="hidden" name="track_formType" value="DirectEmailBanner"> <input type="hidden" value="false" name="gdpr_tick" id="gdpr_tick_banner"> <input type="hidden" value="false" name="marketing" id="marketing_input_banner"> <input type="hidden" value="false" name="marketing_tick" id="marketing_tick_banner"> <input type="hidden" value="MainBriefingBanner" name="brieferEntryPoint" id="brieferEntryPoint_banner"> <label class="nature-briefing-banner__email-label" for="emailAddress">Email address</label> <div class="nature-briefing-banner__email-wrapper"> <input class="nature-briefing-banner__email-input box-sizing text14" type="email" id="emailAddress" name="emailAddress" value="" placeholder="e.g. 
jo.smith@university.ac.uk" required data-test-element="briefing-emailbanner-email-input"> <input type="hidden" value="true" name="N:nature_briefing_daily" id="defaultNewsletter_banner"> <button type="submit" class="nature-briefing-banner__submit-button box-sizing text14" data-test-element="briefing-emailbanner-signup-button">Sign up</button> </div> <div class="nature-briefing-banner__checkbox-wrapper grid grid-12 last"> <input class="nature-briefing-banner__checkbox-checkbox" id="gdpr-briefing-banner-checkbox" type="checkbox" name="gdpr" value="true" data-test-element="briefing-emailbanner-gdpr-checkbox" required> <label class="nature-briefing-banner__checkbox-label box-sizing text13 sans-serif block tighten-line-height" for="gdpr-briefing-banner-checkbox">I agree my information will be processed in accordance with the <em>Nature</em> and Springer Nature Limited <a href="https://www.nature.com/info/privacy">Privacy Policy</a>.</label> </div> </form> </div> </div> </div> </div> <div class="c-site-messages__banner-small"> <div class="c-site-messages__close-container"> <button class="c-site-messages__close" data-track="click" data-track-category="nature briefing" data-track-label="Briefing banner dismiss: Flagship"> <svg width="25px" height="25px" focusable="false" aria-hidden="true" viewBox="0 0 25 25" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <title>Close banner</title> <defs></defs> <g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"> <rect opacity="0" x="0" y="0" width="25" height="25"></rect> <path d="M6.29679575,16.2772478 C5.90020818,16.6738354 5.90240728,17.3100587 6.29617427,17.7038257 C6.69268654,18.100338 7.32864195,18.0973145 7.72275218,17.7032043 L12,13.4259564 L16.2772478,17.7032043 C16.6738354,18.0997918 17.3100587,18.0975927 17.7038257,17.7038257 C18.100338,17.3073135 18.0973145,16.671358 17.7032043,16.2772478 L13.4259564,12 L17.7032043,7.72275218 C18.0997918,7.32616461 18.0975927,6.68994127 17.7038257,6.29617427 C17.3073135,5.89966201 16.671358,5.90268552 16.2772478,6.29679575 L12,10.5740436 L7.72275218,6.29679575 C7.32616461,5.90020818 6.68994127,5.90240728 6.29617427,6.29617427 C5.89966201,6.69268654 5.90268552,7.32864195 6.29679575,7.72275218 L10.5740436,12 L6.29679575,16.2772478 Z" fill="#ffffff"></path> </g> </svg> <span class="visually-hidden">Close</span> </button> </div> <div class="c-site-messages__content text14"> <span class="c-site-messages--nature-briefing__strapline strong">Get the most important science stories of the day, free in your inbox.</span> <a class="nature-briefing__link text14 sans-serif" data-track="click" data-track-category="nature briefing" data-track-label="Small-screen banner CTA to site" data-test-element="briefing-banner-link" target="_blank" rel="noreferrer noopener" href="https://www.nature.com/briefing/signup/?brieferEntryPoint=MainBriefingBanner">Sign up for Nature Briefing </a> </div> </div> </div> <noscript> <img hidden src="https://verify.nature.com/verify/nature.png" width="0" height="0" style="display: none" alt=""> </noscript> <script src="//content.readcube.com/ping?doi=10.1038/s41598-020-74399-w&amp;format=js&amp;last_modified=2021-02-05" async></script> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10