CINXE.COM

Search | arXiv e-print repository

<!-- arXiv search-results page: document head (favicons, MathJax v2 config, styles, jQuery 3.2.1 slim + fieldset.js).
     Review fixes in this span:
     1. The <style> rule used the selector `radio#cf-customfield_11400` — `radio` is not an HTML element
        name, so the rule matched nothing and the field was never hidden. Changed to
        `input#cf-customfield_11400` (the "radio" prefix suggests an <input type="radio">;
        TODO confirm against the element that carries this id — it is not visible in this chunk).
     2. The MathJax <script> used a protocol-relative URL (`//static.arxiv.org/...`); made it explicit
        `https:` to match every other asset URL on the page.
     NOTE(review): jquery-3.2.1.slim is several major versions old; consider upgrading (SRI hash must be
     regenerated if so). -->
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"/> <meta name="viewport" content="width=device-width, initial-scale=1"/> <!-- new favicon config and versions by realfavicongenerator.net --> <link rel="apple-touch-icon" sizes="180x180" href="https://static.arxiv.org/static/base/1.0.0a5/images/icons/apple-touch-icon.png"> <link rel="icon" type="image/png" sizes="32x32" href="https://static.arxiv.org/static/base/1.0.0a5/images/icons/favicon-32x32.png"> <link rel="icon" type="image/png" sizes="16x16" href="https://static.arxiv.org/static/base/1.0.0a5/images/icons/favicon-16x16.png"> <link rel="manifest" href="https://static.arxiv.org/static/base/1.0.0a5/images/icons/site.webmanifest"> <link rel="mask-icon" href="https://static.arxiv.org/static/base/1.0.0a5/images/icons/safari-pinned-tab.svg" color="#b31b1b"> <link rel="shortcut icon" href="https://static.arxiv.org/static/base/1.0.0a5/images/icons/favicon.ico"> <meta name="msapplication-TileColor" content="#b31b1b"> <meta name="msapplication-config" content="images/icons/browserconfig.xml"> <meta name="theme-color" content="#b31b1b"> <!-- end favicon config --> <title>Search | arXiv e-print repository</title> <script defer src="https://static.arxiv.org/static/base/1.0.0a5/fontawesome-free-5.11.2-web/js/all.js"></script> <link rel="stylesheet" href="https://static.arxiv.org/static/base/1.0.0a5/css/arxivstyle.css" /> <script type="text/x-mathjax-config"> MathJax.Hub.Config({ messageStyle: "none", extensions: ["tex2jax.js"], jax: ["input/TeX", "output/HTML-CSS"], tex2jax: { inlineMath: [ ['$','$'], ["\\(","\\)"] ], displayMath: [ ['$$','$$'], ["\\[","\\]"] ], processEscapes: true, ignoreClass: '.*', processClass: 'mathjax.*' }, TeX: { extensions: ["AMSmath.js", "AMSsymbols.js", "noErrors.js"], noErrors: { inlineDelimiters: ["$","$"], multiLine: false, style: { "font-size": "normal", "border": "" } } }, "HTML-CSS": { availableFonts: ["TeX"] } }); </script> <script 
src='https://static.arxiv.org/MathJax-2.7.3/MathJax.js'></script> <script src="https://static.arxiv.org/static/base/1.0.0a5/js/notification.js"></script> <link rel="stylesheet" href="https://static.arxiv.org/static/search/0.5.6/css/bulma-tooltip.min.css" /> <link rel="stylesheet" href="https://static.arxiv.org/static/search/0.5.6/css/search.css" /> <script src="https://code.jquery.com/jquery-3.2.1.slim.min.js" integrity="sha256-k2WSCIexGzOj3Euiig+TlR8gA0EmPjuc79OEeY5L45g=" crossorigin="anonymous"></script> <script src="https://static.arxiv.org/static/search/0.5.6/js/fieldset.js"></script> <style> /* was `radio#...` — dead selector, `radio` is not an element */ input#cf-customfield_11400 { display: none; } </style> </head> <body> <header><a href="#main-container" class="is-sr-only">Skip to main content</a> <!-- contains Cornell logo and sponsor statement --> <div class="attribution level is-marginless" role="banner"> <div class="level-left"> <a class="level-item" href="https://cornell.edu/"><img src="https://static.arxiv.org/static/base/1.0.0a5/images/cornell-reduced-white-SMALL.svg" alt="Cornell University" width="200" aria-label="logo" /></a> </div> <div class="level-right is-marginless"><p class="sponsors level-item is-marginless"><span id="support-ack-url">We gratefully acknowledge support from<br /> the Simons Foundation, <a href="https://info.arxiv.org/about/ourmembers.html">member institutions</a>, and all contributors. 
<!-- Page header (arXiv identity + mini search form) and the main results-filter form.
     NOTE(review): the results form below uses `aria-role="search"` — that attribute does not exist;
     it should be `role="search"`.
     NOTE(review): ids `query`, `searchtype`, `abstracts-0`, `abstracts-1` are each declared twice on
     this page — once in this visible form and again in the hidden sort form further down. Duplicate
     ids are invalid HTML and make the label/`for` associations ambiguous; not renamed here because
     the page's own scripts (fieldset.js) are not visible in this chunk and may select by id.
     NOTE(review): `selected="selected"` in the mini-search select — boolean attributes are
     conventionally written bare (`selected`). -->
<a href="https://info.arxiv.org/about/donate.html">Donate</a></span></p></div> </div> <!-- contains arXiv identity and search bar --> <div class="identity level is-marginless"> <div class="level-left"> <div class="level-item"> <a class="arxiv" href="https://arxiv.org/" aria-label="arxiv-logo"> <img src="https://static.arxiv.org/static/base/1.0.0a5/images/arxiv-logo-one-color-white.svg" aria-label="logo" alt="arxiv logo" width="85" style="width:85px;"/> </a> </div> </div> <div class="search-block level-right"> <form class="level-item mini-search" method="GET" action="https://arxiv.org/search"> <div class="field has-addons"> <div class="control"> <input class="input is-small" type="text" name="query" placeholder="Search..." aria-label="Search term or terms" /> <p class="help"><a href="https://info.arxiv.org/help">Help</a> | <a href="https://arxiv.org/search/advanced">Advanced Search</a></p> </div> <div class="control"> <div class="select is-small"> <select name="searchtype" aria-label="Field to search"> <option value="all" selected="selected">All fields</option> <option value="title">Title</option> <option value="author">Author</option> <option value="abstract">Abstract</option> <option value="comments">Comments</option> <option value="journal_ref">Journal reference</option> <option value="acm_class">ACM classification</option> <option value="msc_class">MSC classification</option> <option value="report_num">Report number</option> <option value="paper_id">arXiv identifier</option> <option value="doi">DOI</option> <option value="orcid">ORCID</option> <option value="author_id">arXiv author ID</option> <option value="help">Help pages</option> <option value="full_text">Full text</option> </select> </div> </div> <input type="hidden" name="source" value="header"> <button class="button is-small is-cul-darker">Search</button> </div> </form> </div> </div> <!-- closes identity --> <div class="container"> <div class="user-tools is-size-7 has-text-right has-text-weight-bold" 
role="navigation" aria-label="User menu"> <a href="https://arxiv.org/login">Login</a> </div> </div> </header> <main class="container" id="main-container"> <div class="level is-marginless"> <div class="level-left"> <h1 class="title is-clearfix"> Showing 1&ndash;7 of 7 results for author: <span class="mathjax">Iglesias, J E</span> </h1> </div> <div class="level-right is-hidden-mobile"> <!-- feedback for mobile is moved to footer --> <span class="help" style="display: inline-block;"><a href="https://github.com/arXiv/arxiv-search/releases">Search v0.5.6 released 2020-02-24</a>&nbsp;&nbsp;</span> </div> </div> <div class="content"> <form method="GET" action="/search/q-bio" aria-role="search"> Searching in archive <strong>q-bio</strong>. <a href="/search/?searchtype=author&amp;query=Iglesias%2C+J+E">Search in all archives.</a> <div class="field has-addons-tablet"> <div class="control is-expanded"> <label for="query" class="hidden-label">Search term or terms</label> <input class="input is-medium" id="query" name="query" placeholder="Search term..." 
type="text" value="Iglesias, J E"> </div> <div class="select control is-medium"> <label class="is-hidden" for="searchtype">Field</label> <select class="is-medium" id="searchtype" name="searchtype"><option value="all">All fields</option><option value="title">Title</option><option selected value="author">Author(s)</option><option value="abstract">Abstract</option><option value="comments">Comments</option><option value="journal_ref">Journal reference</option><option value="acm_class">ACM classification</option><option value="msc_class">MSC classification</option><option value="report_num">Report number</option><option value="paper_id">arXiv identifier</option><option value="doi">DOI</option><option value="orcid">ORCID</option><option value="license">License (URI)</option><option value="author_id">arXiv author ID</option><option value="help">Help pages</option><option value="full_text">Full text</option></select> </div> <div class="control"> <button class="button is-link is-medium">Search</button> </div> </div> <div class="field"> <div class="control is-size-7"> <label class="radio"> <input checked id="abstracts-0" name="abstracts" type="radio" value="show"> Show abstracts </label> <label class="radio"> <input id="abstracts-1" name="abstracts" type="radio" value="hide"> Hide abstracts </label> </div> </div> <div class="is-clearfix" style="height: 2.5em"> <div class="is-pulled-right"> <a href="/search/advanced?terms-0-term=Iglesias%2C+J+E&amp;terms-0-field=author&amp;size=50&amp;order=-announced_date_first">Advanced Search</a> </div> </div> <input type="hidden" name="order" value="-announced_date_first"> <input type="hidden" name="size" value="50"> </form> <div class="level breathe-horizontal"> <div class="level-left"> <form method="GET" action="/search/"> <div style="display: none;"> <select id="searchtype" name="searchtype"><option value="all">All fields</option><option value="title">Title</option><option selected value="author">Author(s)</option><option 
value="abstract">Abstract</option><option value="comments">Comments</option><option value="journal_ref">Journal reference</option><option value="acm_class">ACM classification</option><option value="msc_class">MSC classification</option><option value="report_num">Report number</option><option value="paper_id">arXiv identifier</option><option value="doi">DOI</option><option value="orcid">ORCID</option><option value="license">License (URI)</option><option value="author_id">arXiv author ID</option><option value="help">Help pages</option><option value="full_text">Full text</option></select> <input id="query" name="query" type="text" value="Iglesias, J E"> <ul id="abstracts"><li><input checked id="abstracts-0" name="abstracts" type="radio" value="show"> <label for="abstracts-0">Show abstracts</label></li><li><input id="abstracts-1" name="abstracts" type="radio" value="hide"> <label for="abstracts-1">Hide abstracts</label></li></ul> </div> <div class="box field is-grouped is-grouped-multiline level-item"> <div class="control"> <span class="select is-small"> <select id="size" name="size"><option value="25">25</option><option selected value="50">50</option><option value="100">100</option><option value="200">200</option></select> </span> <label for="size">results per page</label>. 
<!-- End of the sort/page-size form, then the results list (<ol>) and result 1: arXiv:2306.00838
     (BraTS-METS 2023 challenge paper).
     NOTE(review): the "More"/"Less" abstract toggles throughout this page are <a> elements with no
     href and an inline `onclick` — they act as buttons, so they should be
     <button type="button"> with addEventListener-bound handlers (keyboard-accessible, CSP-friendly).
     Left as-is: rewriting them would change the DOM that the page's scripts and styles target. -->
</div> <div class="control"> <label for="order">Sort results by</label> <span class="select is-small"> <select id="order" name="order"><option selected value="-announced_date_first">Announcement date (newest first)</option><option value="announced_date_first">Announcement date (oldest first)</option><option value="-submitted_date">Submission date (newest first)</option><option value="submitted_date">Submission date (oldest first)</option><option value="">Relevance</option></select> </span> </div> <div class="control"> <button class="button is-small is-link">Go</button> </div> </div> </form> </div> </div> <ol class="breathe-horizontal" start="1"> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/2306.00838">arXiv:2306.00838</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/2306.00838">pdf</a>, <a href="https://arxiv.org/format/2306.00838">other</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Other Quantitative Biology">q-bio.OT</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Image and Video Processing">eess.IV</span> </div> </div> <p class="title is-5 mathjax"> The Brain Tumor Segmentation (BraTS-METS) Challenge 2023: Brain Metastasis Segmentation on Pre-treatment MRI </p> <p class="authors"> <span class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Moawad%2C+A+W">Ahmed W. 
Moawad</a>, <a href="/search/q-bio?searchtype=author&amp;query=Janas%2C+A">Anastasia Janas</a>, <a href="/search/q-bio?searchtype=author&amp;query=Baid%2C+U">Ujjwal Baid</a>, <a href="/search/q-bio?searchtype=author&amp;query=Ramakrishnan%2C+D">Divya Ramakrishnan</a>, <a href="/search/q-bio?searchtype=author&amp;query=Saluja%2C+R">Rachit Saluja</a>, <a href="/search/q-bio?searchtype=author&amp;query=Ashraf%2C+N">Nader Ashraf</a>, <a href="/search/q-bio?searchtype=author&amp;query=Maleki%2C+N">Nazanin Maleki</a>, <a href="/search/q-bio?searchtype=author&amp;query=Jekel%2C+L">Leon Jekel</a>, <a href="/search/q-bio?searchtype=author&amp;query=Yordanov%2C+N">Nikolay Yordanov</a>, <a href="/search/q-bio?searchtype=author&amp;query=Fehringer%2C+P">Pascal Fehringer</a>, <a href="/search/q-bio?searchtype=author&amp;query=Gkampenis%2C+A">Athanasios Gkampenis</a>, <a href="/search/q-bio?searchtype=author&amp;query=Amiruddin%2C+R">Raisa Amiruddin</a>, <a href="/search/q-bio?searchtype=author&amp;query=Manteghinejad%2C+A">Amirreza Manteghinejad</a>, <a href="/search/q-bio?searchtype=author&amp;query=Adewole%2C+M">Maruf Adewole</a>, <a href="/search/q-bio?searchtype=author&amp;query=Albrecht%2C+J">Jake Albrecht</a>, <a href="/search/q-bio?searchtype=author&amp;query=Anazodo%2C+U">Udunna Anazodo</a>, <a href="/search/q-bio?searchtype=author&amp;query=Aneja%2C+S">Sanjay Aneja</a>, <a href="/search/q-bio?searchtype=author&amp;query=Anwar%2C+S+M">Syed Muhammad Anwar</a>, <a href="/search/q-bio?searchtype=author&amp;query=Bergquist%2C+T">Timothy Bergquist</a>, <a href="/search/q-bio?searchtype=author&amp;query=Chiang%2C+V">Veronica Chiang</a>, <a href="/search/q-bio?searchtype=author&amp;query=Chung%2C+V">Verena Chung</a>, <a href="/search/q-bio?searchtype=author&amp;query=Conte%2C+G+M">Gian Marco Conte</a>, <a href="/search/q-bio?searchtype=author&amp;query=Dako%2C+F">Farouk Dako</a>, <a href="/search/q-bio?searchtype=author&amp;query=Eddy%2C+J">James Eddy</a>, <a 
href="/search/q-bio?searchtype=author&amp;query=Ezhov%2C+I">Ivan Ezhov</a> , et al. (207 additional authors not shown) </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" id="2306.00838v3-abstract-short" style="display: inline;"> The translation of AI-generated brain metastases (BM) segmentation into clinical practice relies heavily on diverse, high-quality annotated medical imaging datasets. The BraTS-METS 2023 challenge has gained momentum for testing and benchmarking algorithms using rigorously annotated internationally compiled real-world datasets. This study presents the results of the segmentation challenge and chara&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2306.00838v3-abstract-full').style.display = 'inline'; document.getElementById('2306.00838v3-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="2306.00838v3-abstract-full" style="display: none;"> The translation of AI-generated brain metastases (BM) segmentation into clinical practice relies heavily on diverse, high-quality annotated medical imaging datasets. The BraTS-METS 2023 challenge has gained momentum for testing and benchmarking algorithms using rigorously annotated internationally compiled real-world datasets. This study presents the results of the segmentation challenge and characterizes the challenging cases that impacted the performance of the winning algorithms. Untreated brain metastases on standard anatomic MRI sequences (T1, T2, FLAIR, T1PG) from eight contributed international datasets were annotated in stepwise method: published UNET algorithms, student, neuroradiologist, final approver neuroradiologist. Segmentations were ranked based on lesion-wise Dice and Hausdorff distance (HD95) scores. 
False positives (FP) and false negatives (FN) were rigorously penalized, receiving a score of 0 for Dice and a fixed penalty of 374 for HD95. Eight datasets comprising 1303 studies were annotated, with 402 studies (3076 lesions) released on Synapse as publicly available datasets to challenge competitors. Additionally, 31 studies (139 lesions) were held out for validation, and 59 studies (218 lesions) were used for testing. Segmentation accuracy was measured as rank across subjects, with the winning team achieving a LesionWise mean score of 7.9. Common errors among the leading teams included false negatives for small lesions and misregistration of masks in space.The BraTS-METS 2023 challenge successfully curated well-annotated, diverse datasets and identified common errors, facilitating the translation of BM segmentation across varied clinical environments and providing personalized volumetric reports to patients undergoing BM treatment. <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2306.00838v3-abstract-full').style.display = 'none'; document.getElementById('2306.00838v3-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 8 December, 2024; <span class="has-text-black-bis has-text-weight-semibold">v1</span> submitted 1 June, 2023; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> June 2023. 
<!-- Result 2: arXiv:2305.17033 (BraTS-PEDs 2023 challenge paper). Same template as result 1:
     list-title link, category tags, title, truncated author list, short/full abstract pair toggled
     via inline onclick handlers. The `\%` escapes in the abstract text are LaTeX source preserved
     verbatim from the submission. -->
</p> </li> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/2305.17033">arXiv:2305.17033</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/2305.17033">pdf</a>, <a href="https://arxiv.org/format/2305.17033">other</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Image and Video Processing">eess.IV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Computer Vision and Pattern Recognition">cs.CV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Machine Learning">cs.LG</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Quantitative Methods">q-bio.QM</span> </div> </div> <p class="title is-5 mathjax"> The Brain Tumor Segmentation (BraTS) Challenge 2023: Focus on Pediatrics (CBTN-CONNECT-DIPGR-ASNR-MICCAI BraTS-PEDs) </p> <p class="authors"> <span class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Kazerooni%2C+A+F">Anahita Fathi Kazerooni</a>, <a href="/search/q-bio?searchtype=author&amp;query=Khalili%2C+N">Nastaran Khalili</a>, <a href="/search/q-bio?searchtype=author&amp;query=Liu%2C+X">Xinyang Liu</a>, <a href="/search/q-bio?searchtype=author&amp;query=Haldar%2C+D">Debanjan Haldar</a>, <a href="/search/q-bio?searchtype=author&amp;query=Jiang%2C+Z">Zhifan Jiang</a>, <a href="/search/q-bio?searchtype=author&amp;query=Anwar%2C+S+M">Syed Muhammed Anwar</a>, <a href="/search/q-bio?searchtype=author&amp;query=Albrecht%2C+J">Jake Albrecht</a>, <a href="/search/q-bio?searchtype=author&amp;query=Adewole%2C+M">Maruf Adewole</a>, <a href="/search/q-bio?searchtype=author&amp;query=Anazodo%2C+U">Udunna Anazodo</a>, <a href="/search/q-bio?searchtype=author&amp;query=Anderson%2C+H">Hannah Anderson</a>, <a href="/search/q-bio?searchtype=author&amp;query=Bagheri%2C+S">Sina Bagheri</a>, <a 
href="/search/q-bio?searchtype=author&amp;query=Baid%2C+U">Ujjwal Baid</a>, <a href="/search/q-bio?searchtype=author&amp;query=Bergquist%2C+T">Timothy Bergquist</a>, <a href="/search/q-bio?searchtype=author&amp;query=Borja%2C+A+J">Austin J. Borja</a>, <a href="/search/q-bio?searchtype=author&amp;query=Calabrese%2C+E">Evan Calabrese</a>, <a href="/search/q-bio?searchtype=author&amp;query=Chung%2C+V">Verena Chung</a>, <a href="/search/q-bio?searchtype=author&amp;query=Conte%2C+G">Gian-Marco Conte</a>, <a href="/search/q-bio?searchtype=author&amp;query=Dako%2C+F">Farouk Dako</a>, <a href="/search/q-bio?searchtype=author&amp;query=Eddy%2C+J">James Eddy</a>, <a href="/search/q-bio?searchtype=author&amp;query=Ezhov%2C+I">Ivan Ezhov</a>, <a href="/search/q-bio?searchtype=author&amp;query=Familiar%2C+A">Ariana Familiar</a>, <a href="/search/q-bio?searchtype=author&amp;query=Farahani%2C+K">Keyvan Farahani</a>, <a href="/search/q-bio?searchtype=author&amp;query=Haldar%2C+S">Shuvanjan Haldar</a>, <a href="/search/q-bio?searchtype=author&amp;query=Iglesias%2C+J+E">Juan Eugenio Iglesias</a>, <a href="/search/q-bio?searchtype=author&amp;query=Janas%2C+A">Anastasia Janas</a> , et al. (48 additional authors not shown) </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" id="2305.17033v7-abstract-short" style="display: inline;"> Pediatric tumors of the central nervous system are the most common cause of cancer-related death in children. The five-year survival rate for high-grade gliomas in children is less than 20\%. Due to their rarity, the diagnosis of these entities is often delayed, their treatment is mainly based on historic treatment concepts, and clinical trials require multi-institutional collaborations. 
The MICCA&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2305.17033v7-abstract-full').style.display = 'inline'; document.getElementById('2305.17033v7-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="2305.17033v7-abstract-full" style="display: none;"> Pediatric tumors of the central nervous system are the most common cause of cancer-related death in children. The five-year survival rate for high-grade gliomas in children is less than 20\%. Due to their rarity, the diagnosis of these entities is often delayed, their treatment is mainly based on historic treatment concepts, and clinical trials require multi-institutional collaborations. The MICCAI Brain Tumor Segmentation (BraTS) Challenge is a landmark community benchmark event with a successful history of 12 years of resource creation for the segmentation and analysis of adult glioma. Here we present the CBTN-CONNECT-DIPGR-ASNR-MICCAI BraTS-PEDs 2023 challenge, which represents the first BraTS challenge focused on pediatric brain tumors with data acquired across multiple international consortia dedicated to pediatric neuro-oncology and clinical trials. The BraTS-PEDs 2023 challenge focuses on benchmarking the development of volumentric segmentation algorithms for pediatric brain glioma through standardized quantitative performance evaluation metrics utilized across the BraTS 2023 cluster of challenges. Models gaining knowledge from the BraTS-PEDs multi-parametric structural MRI (mpMRI) training data will be evaluated on separate validation and unseen test mpMRI dataof high-grade pediatric glioma. The CBTN-CONNECT-DIPGR-ASNR-MICCAI BraTS-PEDs 2023 challenge brings together clinicians and AI/imaging scientists to lead to faster development of automated segmentation techniques that could benefit clinical trials, and ultimately the care of children with brain tumors. 
<!-- Close of result 2 (Less toggle + submission dates), then result 3: arXiv:2305.03413 (thalamic
     nuclei segmentation, with DOI tag) and result 4: arXiv:2004.10282 (SynthMorph, with DOI tag).
     Same per-result template; DOI rows add a tags-has-addons pair linking to doi.org with a
     Font Awesome external-link icon.
     NOTE(review): the <i class="fa fa-external-link"> icons carry aria-hidden="true" but the
     enclosing DOI links' visible text is the DOI string itself, which is acceptable. -->
<a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2305.17033v7-abstract-full').style.display = 'none'; document.getElementById('2305.17033v7-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 23 May, 2024; <span class="has-text-black-bis has-text-weight-semibold">v1</span> submitted 26 May, 2023; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> May 2023. </p> </li> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/2305.03413">arXiv:2305.03413</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/2305.03413">pdf</a>, <a href="https://arxiv.org/format/2305.03413">other</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Image and Video Processing">eess.IV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Computer Vision and Pattern Recognition">cs.CV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Machine Learning">cs.LG</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Quantitative Methods">q-bio.QM</span> </div> <div class="is-inline-block" style="margin-left: 0.5rem"> <div class="tags has-addons"> <span class="tag is-dark is-size-7">doi</span> <span class="tag is-light is-size-7"><a class="" href="https://doi.org/10.1007/978-3-031-43993-3_24">10.1007/978-3-031-43993-3_24 <i class="fa fa-external-link" aria-hidden="true"></i></a></span> </div> </div> </div> <p class="title is-5 mathjax"> Domain-agnostic segmentation of thalamic nuclei from joint structural and diffusion MRI </p> <p class="authors"> <span class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Tregidgo%2C+H+F+J">Henry F. J. 
Tregidgo</a>, <a href="/search/q-bio?searchtype=author&amp;query=Soskic%2C+S">Sonja Soskic</a>, <a href="/search/q-bio?searchtype=author&amp;query=Olchanyi%2C+M+D">Mark D. Olchanyi</a>, <a href="/search/q-bio?searchtype=author&amp;query=Althonayan%2C+J">Juri Althonayan</a>, <a href="/search/q-bio?searchtype=author&amp;query=Billot%2C+B">Benjamin Billot</a>, <a href="/search/q-bio?searchtype=author&amp;query=Maffei%2C+C">Chiara Maffei</a>, <a href="/search/q-bio?searchtype=author&amp;query=Golland%2C+P">Polina Golland</a>, <a href="/search/q-bio?searchtype=author&amp;query=Yendiki%2C+A">Anastasia Yendiki</a>, <a href="/search/q-bio?searchtype=author&amp;query=Alexander%2C+D+C">Daniel C. Alexander</a>, <a href="/search/q-bio?searchtype=author&amp;query=Bocchetta%2C+M">Martina Bocchetta</a>, <a href="/search/q-bio?searchtype=author&amp;query=Rohrer%2C+J+D">Jonathan D. Rohrer</a>, <a href="/search/q-bio?searchtype=author&amp;query=Iglesias%2C+J+E">Juan Eugenio Iglesias</a> </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" id="2305.03413v1-abstract-short" style="display: inline;"> The human thalamus is a highly connected subcortical grey-matter structure within the brain. It comprises dozens of nuclei with different function and connectivity, which are affected differently by disease. For this reason, there is growing interest in studying the thalamic nuclei in vivo with MRI. 
Tools are available to segment the thalamus from 1 mm T1 scans, but the contrast of the lateral and&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2305.03413v1-abstract-full').style.display = 'inline'; document.getElementById('2305.03413v1-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="2305.03413v1-abstract-full" style="display: none;"> The human thalamus is a highly connected subcortical grey-matter structure within the brain. It comprises dozens of nuclei with different function and connectivity, which are affected differently by disease. For this reason, there is growing interest in studying the thalamic nuclei in vivo with MRI. Tools are available to segment the thalamus from 1 mm T1 scans, but the contrast of the lateral and internal boundaries is too faint to produce reliable segmentations. Some tools have attempted to incorporate information from diffusion MRI in the segmentation to refine these boundaries, but do not generalise well across diffusion MRI acquisitions. Here we present the first CNN that can segment thalamic nuclei from T1 and diffusion data of any resolution without retraining or fine tuning. Our method builds on a public histological atlas of the thalamic nuclei and silver standard segmentations on high-quality diffusion data obtained with a recent Bayesian adaptive segmentation tool. We combine these with an approximate degradation model for fast domain randomisation during training. Our CNN produces a segmentation at 0.7 mm isotropic resolution, irrespective of the resolution of the input. Moreover, it uses a parsimonious model of the diffusion signal at each voxel (fractional anisotropy and principal eigenvector) that is compatible with virtually any set of directions and b-values, including huge amounts of legacy data. 
We show results of our proposed method on three heterogeneous datasets acquired on dozens of different scanners. An implementation of the method is publicly available at https://freesurfer.net/fswiki/ThalamicNucleiDTI. <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2305.03413v1-abstract-full').style.display = 'none'; document.getElementById('2305.03413v1-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 5 May, 2023; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> May 2023. </p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">Comments:</span> <span class="has-text-grey-dark mathjax">Under review</span> </p> </li> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/2004.10282">arXiv:2004.10282</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/2004.10282">pdf</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Image and Video Processing">eess.IV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Computer Vision and Pattern Recognition">cs.CV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Neurons and Cognition">q-bio.NC</span> </div> <div class="is-inline-block" style="margin-left: 0.5rem"> <div class="tags has-addons"> <span class="tag is-dark is-size-7">doi</span> <span class="tag is-light is-size-7"><a class="" href="https://doi.org/10.1109/TMI.2021.3116879">10.1109/TMI.2021.3116879 <i class="fa fa-external-link" aria-hidden="true"></i></a></span> </div> </div> </div> <p class="title is-5 mathjax"> SynthMorph: learning contrast-invariant registration without acquired images </p> <p class="authors"> <span 
class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Hoffmann%2C+M">Malte Hoffmann</a>, <a href="/search/q-bio?searchtype=author&amp;query=Billot%2C+B">Benjamin Billot</a>, <a href="/search/q-bio?searchtype=author&amp;query=Greve%2C+D+N">Douglas N. Greve</a>, <a href="/search/q-bio?searchtype=author&amp;query=Iglesias%2C+J+E">Juan Eugenio Iglesias</a>, <a href="/search/q-bio?searchtype=author&amp;query=Fischl%2C+B">Bruce Fischl</a>, <a href="/search/q-bio?searchtype=author&amp;query=Dalca%2C+A+V">Adrian V. Dalca</a> </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" id="2004.10282v4-abstract-short" style="display: inline;"> We introduce a strategy for learning image registration without acquired imaging data, producing powerful networks agnostic to contrast introduced by magnetic resonance imaging (MRI). While classical registration methods accurately estimate the spatial correspondence between images, they solve an optimization problem for every new image pair. Learning-based techniques are fast at test time but lim&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2004.10282v4-abstract-full').style.display = 'inline'; document.getElementById('2004.10282v4-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="2004.10282v4-abstract-full" style="display: none;"> We introduce a strategy for learning image registration without acquired imaging data, producing powerful networks agnostic to contrast introduced by magnetic resonance imaging (MRI). While classical registration methods accurately estimate the spatial correspondence between images, they solve an optimization problem for every new image pair. 
Learning-based techniques are fast at test time but limited to registering images with contrasts and geometric content similar to those seen during training. We propose to remove this dependency on training data by leveraging a generative strategy for diverse synthetic label maps and images that exposes networks to a wide range of variability, forcing them to learn more invariant features. This approach results in powerful networks that accurately generalize to a broad array of MRI contrasts. We present extensive experiments with a focus on 3D neuroimaging, showing that this strategy enables robust and accurate registration of arbitrary MRI contrasts even if the target contrast is not seen by the networks during training. We demonstrate registration accuracy surpassing the state of the art both within and across contrasts, using a single model. Critically, training on arbitrary shapes synthesized from noise distributions results in competitive performance, removing the dependency on acquired data of any kind. Additionally, since anatomical label maps are often available for the anatomy of interest, we show that synthesizing images from these dramatically boosts performance, while still avoiding the need for real intensity images. Our code is available at https://w3id.org/synthmorph. <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2004.10282v4-abstract-full').style.display = 'none'; document.getElementById('2004.10282v4-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 3 March, 2022; <span class="has-text-black-bis has-text-weight-semibold">v1</span> submitted 21 April, 2020; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> April 2020. 
</p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">Comments:</span> <span class="has-text-grey-dark mathjax">16 pages, 15 figures, 3 tables, deformable image registration, data independence, deep learning, MRI-contrast invariance, anatomy agnosticism, final published version</span> </p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">Journal ref:</span> IEEE Trans Med Imaging, 41 (3), 2022, 543-558 </p> </li> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/2004.10221">arXiv:2004.10221</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/2004.10221">pdf</a>, <a href="https://arxiv.org/format/2004.10221">other</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Computer Vision and Pattern Recognition">cs.CV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Machine Learning">cs.LG</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Image and Video Processing">eess.IV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Quantitative Methods">q-bio.QM</span> </div> <div class="is-inline-block" style="margin-left: 0.5rem"> <div class="tags has-addons"> <span class="tag is-dark is-size-7">doi</span> <span class="tag is-light is-size-7"><a class="" href="https://doi.org/10.1007/978-3-030-59728-3_18">10.1007/978-3-030-59728-3_18 <i class="fa fa-external-link" aria-hidden="true"></i></a></span> </div> </div> </div> <p class="title is-5 mathjax"> Partial Volume Segmentation of Brain MRI Scans of any Resolution and Contrast </p> <p class="authors"> <span class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Billot%2C+B">Benjamin Billot</a>, <a href="/search/q-bio?searchtype=author&amp;query=Robinson%2C+E+D">Eleanor D. 
Robinson</a>, <a href="/search/q-bio?searchtype=author&amp;query=Dalca%2C+A+V">Adrian V. Dalca</a>, <a href="/search/q-bio?searchtype=author&amp;query=Iglesias%2C+J+E">Juan Eugenio Iglesias</a> </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" id="2004.10221v3-abstract-short" style="display: inline;"> Partial voluming (PV) is arguably the last crucial unsolved problem in Bayesian segmentation of brain MRI with probabilistic atlases. PV occurs when voxels contain multiple tissue classes, giving rise to image intensities that may not be representative of any one of the underlying classes. PV is particularly problematic for segmentation when there is a large resolution gap between the atlas and th&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2004.10221v3-abstract-full').style.display = 'inline'; document.getElementById('2004.10221v3-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="2004.10221v3-abstract-full" style="display: none;"> Partial voluming (PV) is arguably the last crucial unsolved problem in Bayesian segmentation of brain MRI with probabilistic atlases. PV occurs when voxels contain multiple tissue classes, giving rise to image intensities that may not be representative of any one of the underlying classes. PV is particularly problematic for segmentation when there is a large resolution gap between the atlas and the test scan, e.g., when segmenting clinical scans with thick slices, or when using a high-resolution atlas. In this work, we present PV-SynthSeg, a convolutional neural network (CNN) that tackles this problem by directly learning a mapping between (possibly multi-modal) low resolution (LR) scans and underlying high resolution (HR) segmentations. 
PV-SynthSeg simulates LR images from HR label maps with a generative model of PV, and can be trained to segment scans of any desired target contrast and resolution, even for previously unseen modalities where neither images nor segmentations are available at training. PV-SynthSeg does not require any preprocessing, and runs in seconds. We demonstrate the accuracy and flexibility of the method with extensive experiments on three datasets and 2,680 scans. The code is available at https://github.com/BBillot/SynthSeg. <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('2004.10221v3-abstract-full').style.display = 'none'; document.getElementById('2004.10221v3-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 8 April, 2021; <span class="has-text-black-bis has-text-weight-semibold">v1</span> submitted 21 April, 2020; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> April 2020. </p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">Comments:</span> <span class="has-text-grey-dark mathjax">12 pages, 7 figures</span> </p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">Journal ref:</span> International Conference on Medical Image Computing and Computer-Assisted Intervention (MICCAI) 2020, pp. 
177-187 </p> </li> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/1806.08634">arXiv:1806.08634</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/1806.08634">pdf</a>, <a href="https://arxiv.org/format/1806.08634">other</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Neurons and Cognition">q-bio.NC</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Computer Vision and Pattern Recognition">cs.CV</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Medical Physics">physics.med-ph</span> </div> </div> <p class="title is-5 mathjax"> A probabilistic atlas of the human thalamic nuclei combining ex vivo MRI and histology </p> <p class="authors"> <span class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Iglesias%2C+J+E">Juan Eugenio Iglesias</a>, <a href="/search/q-bio?searchtype=author&amp;query=Insausti%2C+R">Ricardo Insausti</a>, <a href="/search/q-bio?searchtype=author&amp;query=Lerma-Usabiaga%2C+G">Garikoitz Lerma-Usabiaga</a>, <a href="/search/q-bio?searchtype=author&amp;query=Bocchetta%2C+M">Martina Bocchetta</a>, <a href="/search/q-bio?searchtype=author&amp;query=Van+Leemput%2C+K">Koen Van Leemput</a>, <a href="/search/q-bio?searchtype=author&amp;query=Greve%2C+D+N">Douglas N Greve</a>, <a href="/search/q-bio?searchtype=author&amp;query=van+der+Kouwe%2C+A">Andre van der Kouwe</a>, <a href="/search/q-bio?searchtype=author&amp;query=Fischl%2C+B">Bruce Fischl</a>, <a href="/search/q-bio?searchtype=author&amp;query=Caballero-Gaudes%2C+C">Cesar Caballero-Gaudes</a>, <a href="/search/q-bio?searchtype=author&amp;query=Paz-Alonso%2C+P+M">Pedro M Paz-Alonso</a> </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" 
id="1806.08634v1-abstract-short" style="display: inline;"> The human thalamus is a brain structure that comprises numerous, highly specific nuclei. Since these nuclei are known to have different functions and to be connected to different areas of the cerebral cortex, it is of great interest for the neuroimaging community to study their volume, shape and connectivity in vivo with MRI. In this study, we present a probabilistic atlas of the thalamic nuclei b&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('1806.08634v1-abstract-full').style.display = 'inline'; document.getElementById('1806.08634v1-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="1806.08634v1-abstract-full" style="display: none;"> The human thalamus is a brain structure that comprises numerous, highly specific nuclei. Since these nuclei are known to have different functions and to be connected to different areas of the cerebral cortex, it is of great interest for the neuroimaging community to study their volume, shape and connectivity in vivo with MRI. In this study, we present a probabilistic atlas of the thalamic nuclei built using ex vivo brain MRI scans and histological data, as well as the application of the atlas to in vivo MRI segmentation. The atlas was built using manual delineation of 26 thalamic nuclei on the serial histology of 12 whole thalami from six autopsy samples, combined with manual segmentations of the whole thalamus and surrounding structures (caudate, putamen, hippocampus, etc.) made on in vivo brain MR data from 39 subjects. The 3D structure of the histological data and corresponding manual segmentations was recovered using the ex vivo MRI as reference frame, and stacks of blockface photographs acquired during the sectioning as intermediate target. 
The atlas, which was encoded as an adaptive tetrahedral mesh, shows a good agreement with previous histological studies of the thalamus in terms of volumes of representative nuclei. When applied to segmentation of in vivo scans using Bayesian inference, the atlas shows excellent test-retest reliability, robustness to changes in input MRI contrast, and ability to detect differential thalamic effects in subjects with Alzheimer&#39;s disease. The probabilistic atlas and companion segmentation tool are publicly available as part of the neuroimaging package FreeSurfer. <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('1806.08634v1-abstract-full').style.display = 'none'; document.getElementById('1806.08634v1-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 22 June, 2018; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> June 2018. 
</p> </li> <li class="arxiv-result"> <div class="is-marginless"> <p class="list-title is-inline-block"><a href="https://arxiv.org/abs/1706.08041">arXiv:1706.08041</a> <span>&nbsp;[<a href="https://arxiv.org/pdf/1706.08041">pdf</a>, <a href="https://arxiv.org/format/1706.08041">other</a>]&nbsp;</span> </p> <div class="tags is-inline-block"> <span class="tag is-small is-link tooltip is-tooltip-top" data-tooltip="Quantitative Methods">q-bio.QM</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Neurons and Cognition">q-bio.NC</span> <span class="tag is-small is-grey tooltip is-tooltip-top" data-tooltip="Applications">stat.AP</span> </div> <div class="is-inline-block" style="margin-left: 0.5rem"> <div class="tags has-addons"> <span class="tag is-dark is-size-7">doi</span> <span class="tag is-light is-size-7"><a class="" href="https://doi.org/10.1073/pnas.1705414114">10.1073/pnas.1705414114 <i class="fa fa-external-link" aria-hidden="true"></i></a></span> </div> </div> </div> <p class="title is-5 mathjax"> Sparsity Enables Estimation of both Subcortical and Cortical Activity from MEG and EEG </p> <p class="authors"> <span class="search-hit">Authors:</span> <a href="/search/q-bio?searchtype=author&amp;query=Krishnaswamy%2C+P">Pavitra Krishnaswamy</a>, <a href="/search/q-bio?searchtype=author&amp;query=Obregon-Henao%2C+G">Gabriel Obregon-Henao</a>, <a href="/search/q-bio?searchtype=author&amp;query=Ahveninen%2C+J">Jyrki Ahveninen</a>, <a href="/search/q-bio?searchtype=author&amp;query=Khan%2C+S">Sheraz Khan</a>, <a href="/search/q-bio?searchtype=author&amp;query=Babadi%2C+B">Behtash Babadi</a>, <a href="/search/q-bio?searchtype=author&amp;query=Iglesias%2C+J+E">Juan Eugenio Iglesias</a>, <a href="/search/q-bio?searchtype=author&amp;query=Hamalainen%2C+M+S">Matti S. Hamalainen</a>, <a href="/search/q-bio?searchtype=author&amp;query=Purdon%2C+P+L">Patrick L. 
Purdon</a> </p> <p class="abstract mathjax"> <span class="has-text-black-bis has-text-weight-semibold">Abstract</span>: <span class="abstract-short has-text-grey-dark mathjax" id="1706.08041v1-abstract-short" style="display: inline;"> Subcortical structures play a critical role in brain function. However, options for assessing electrophysiological activity in these structures are limited. Electromagnetic fields generated by neuronal activity in subcortical structures can be recorded non-invasively using magnetoencephalography (MEG) and electroencephalography (EEG). However, these subcortical signals are much weaker than those d&hellip; <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('1706.08041v1-abstract-full').style.display = 'inline'; document.getElementById('1706.08041v1-abstract-short').style.display = 'none';">&#9661; More</a> </span> <span class="abstract-full has-text-grey-dark mathjax" id="1706.08041v1-abstract-full" style="display: none;"> Subcortical structures play a critical role in brain function. However, options for assessing electrophysiological activity in these structures are limited. Electromagnetic fields generated by neuronal activity in subcortical structures can be recorded non-invasively using magnetoencephalography (MEG) and electroencephalography (EEG). However, these subcortical signals are much weaker than those due to cortical activity. In addition, we show here that it is difficult to resolve subcortical sources, because distributed cortical activity can explain the MEG and EEG patterns due to deep sources. We then demonstrate that if the cortical activity can be assumed to be spatially sparse, both cortical and subcortical sources can be resolved with M/EEG. Building on this insight, we develop a novel hierarchical sparse inverse solution for M/EEG. 
We assess the performance of this algorithm on realistic simulations and auditory evoked response data and show that thalamic and brainstem sources can be correctly estimated in the presence of cortical activity. Our analysis and method suggest new opportunities and offer practical tools for characterizing electrophysiological activity in the subcortical structures of the human brain. <a class="is-size-7" style="white-space: nowrap;" onclick="document.getElementById('1706.08041v1-abstract-full').style.display = 'none'; document.getElementById('1706.08041v1-abstract-short').style.display = 'inline';">&#9651; Less</a> </span> </p> <p class="is-size-7"><span class="has-text-black-bis has-text-weight-semibold">Submitted</span> 25 June, 2017; <span class="has-text-black-bis has-text-weight-semibold">originally announced</span> June 2017. </p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">Comments:</span> <span class="has-text-grey-dark mathjax">12 pages with 6 figures</span> </p> <p class="comments is-size-7"> <span class="has-text-black-bis has-text-weight-semibold">MSC Class:</span> 62-07; 15A29 (Primary); and 62P10; 92-08; 68W01 (Secondary) <span class="has-text-black-bis has-text-weight-semibold">ACM Class:</span> G.1.3; I.5.4 </p> </li> </ol> <div class="is-hidden-tablet"> <!-- feedback for mobile only --> <span class="help" style="display: inline-block;"><a href="https://github.com/arXiv/arxiv-search/releases">Search v0.5.6 released 2020-02-24</a>&nbsp;&nbsp;</span> </div> </div> </main> <footer> <div class="columns is-desktop" role="navigation" aria-label="Secondary"> <!-- MetaColumn 1 --> <div class="column"> <div class="columns"> <div class="column"> <ul class="nav-spaced"> <li><a href="https://info.arxiv.org/about">About</a></li> <li><a href="https://info.arxiv.org/help">Help</a></li> </ul> </div> <div class="column"> <ul class="nav-spaced"> <li> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" 
class="icon filter-black" role="presentation"><title>contact arXiv</title><desc>Click here to contact arXiv</desc><path d="M502.3 190.8c3.9-3.1 9.7-.2 9.7 4.7V400c0 26.5-21.5 48-48 48H48c-26.5 0-48-21.5-48-48V195.6c0-5 5.7-7.8 9.7-4.7 22.4 17.4 52.1 39.5 154.1 113.6 21.1 15.4 56.7 47.8 92.2 47.6 35.7.3 72-32.8 92.3-47.6 102-74.1 131.6-96.3 154-113.7zM256 320c23.2.4 56.6-29.2 73.4-41.4 132.7-96.3 142.8-104.7 173.4-128.7 5.8-4.5 9.2-11.5 9.2-18.9v-19c0-26.5-21.5-48-48-48H48C21.5 64 0 85.5 0 112v19c0 7.4 3.4 14.3 9.2 18.9 30.6 23.9 40.7 32.4 173.4 128.7 16.8 12.2 50.2 41.8 73.4 41.4z"/></svg> <a href="https://info.arxiv.org/help/contact.html"> Contact</a> </li> <li> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" class="icon filter-black" role="presentation"><title>subscribe to arXiv mailings</title><desc>Click here to subscribe</desc><path d="M476 3.2L12.5 270.6c-18.1 10.4-15.8 35.6 2.2 43.2L121 358.4l287.3-253.2c5.5-4.9 13.3 2.6 8.6 8.3L176 407v80.5c0 23.6 28.5 32.9 42.5 15.8L282 426l124.6 52.2c14.2 6 30.4-2.9 33-18.2l72-432C515 7.8 493.3-6.8 476 3.2z"/></svg> <a href="https://info.arxiv.org/help/subscribe"> Subscribe</a> </li> </ul> </div> </div> </div> <!-- end MetaColumn 1 --> <!-- MetaColumn 2 --> <div class="column"> <div class="columns"> <div class="column"> <ul class="nav-spaced"> <li><a href="https://info.arxiv.org/help/license/index.html">Copyright</a></li> <li><a href="https://info.arxiv.org/help/policies/privacy_policy.html">Privacy Policy</a></li> </ul> </div> <div class="column sorry-app-links"> <ul class="nav-spaced"> <li><a href="https://info.arxiv.org/help/web_accessibility.html">Web Accessibility Assistance</a></li> <li> <p class="help"> <a class="a11y-main-link" href="https://status.arxiv.org" target="_blank">arXiv Operational Status <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 256 512" class="icon filter-dark_grey" role="presentation"><path d="M224.3 273l-136 136c-9.4 9.4-24.6 9.4-33.9 0l-22.6-22.6c-9.4-9.4-9.4-24.6 
0-33.9l96.4-96.4-96.4-96.4c-9.4-9.4-9.4-24.6 0-33.9L54.3 103c9.4-9.4 24.6-9.4 33.9 0l136 136c9.5 9.4 9.5 24.6.1 34z"/></svg></a><br> Get status notifications via <a class="is-link" href="https://subscribe.sorryapp.com/24846f03/email/new" target="_blank"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" class="icon filter-black" role="presentation"><path d="M502.3 190.8c3.9-3.1 9.7-.2 9.7 4.7V400c0 26.5-21.5 48-48 48H48c-26.5 0-48-21.5-48-48V195.6c0-5 5.7-7.8 9.7-4.7 22.4 17.4 52.1 39.5 154.1 113.6 21.1 15.4 56.7 47.8 92.2 47.6 35.7.3 72-32.8 92.3-47.6 102-74.1 131.6-96.3 154-113.7zM256 320c23.2.4 56.6-29.2 73.4-41.4 132.7-96.3 142.8-104.7 173.4-128.7 5.8-4.5 9.2-11.5 9.2-18.9v-19c0-26.5-21.5-48-48-48H48C21.5 64 0 85.5 0 112v19c0 7.4 3.4 14.3 9.2 18.9 30.6 23.9 40.7 32.4 173.4 128.7 16.8 12.2 50.2 41.8 73.4 41.4z"/></svg>email</a> or <a class="is-link" href="https://subscribe.sorryapp.com/24846f03/slack/new" target="_blank"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512" class="icon filter-black" role="presentation"><path d="M94.12 315.1c0 25.9-21.16 47.06-47.06 47.06S0 341 0 315.1c0-25.9 21.16-47.06 47.06-47.06h47.06v47.06zm23.72 0c0-25.9 21.16-47.06 47.06-47.06s47.06 21.16 47.06 47.06v117.84c0 25.9-21.16 47.06-47.06 47.06s-47.06-21.16-47.06-47.06V315.1zm47.06-188.98c-25.9 0-47.06-21.16-47.06-47.06S139 32 164.9 32s47.06 21.16 47.06 47.06v47.06H164.9zm0 23.72c25.9 0 47.06 21.16 47.06 47.06s-21.16 47.06-47.06 47.06H47.06C21.16 243.96 0 222.8 0 196.9s21.16-47.06 47.06-47.06H164.9zm188.98 47.06c0-25.9 21.16-47.06 47.06-47.06 25.9 0 47.06 21.16 47.06 47.06s-21.16 47.06-47.06 47.06h-47.06V196.9zm-23.72 0c0 25.9-21.16 47.06-47.06 47.06-25.9 0-47.06-21.16-47.06-47.06V79.06c0-25.9 21.16-47.06 47.06-47.06 25.9 0 47.06 21.16 47.06 47.06V196.9zM283.1 385.88c25.9 0 47.06 21.16 47.06 47.06 0 25.9-21.16 47.06-47.06 47.06-25.9 0-47.06-21.16-47.06-47.06v-47.06h47.06zm0-23.72c-25.9 0-47.06-21.16-47.06-47.06 0-25.9 21.16-47.06 47.06-47.06h117.84c25.9 0 47.06 
21.16 47.06 47.06 0 25.9-21.16 47.06-47.06 47.06H283.1z"/></svg>slack</a> </p> </li> </ul> </div> </div> </div> <!-- end MetaColumn 2 --> </div> </footer> <script src="https://static.arxiv.org/static/base/1.0.0a5/js/member_acknowledgement.js"></script> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10