Yong Xu | Papers With Code
href="/newsletter">Read previous issues</a> </div> <input type="hidden" name="csrfmiddlewaretoken" value="YcqYLE4qTapKfU278psgDJUI43t0el6DKuRkqVA4LgnyjRtzpW71yQFokfa7teD0"> <input placeholder="Enter your email" type="email" class="form-control pwc-email" name="address" id="id_address" max_length="100" required> </div> <div class="modal-footer"> <button type="submit" class="btn btn-primary">Subscribe</button> </div> </form> </div> </div> </div> <!-- Login --> <div class="modal fade" id="loginModal" tabindex="-1" role="dialog" aria-labelledby="loginModalLabel" aria-hidden="true"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <h5 class="modal-title" id="loginModalLabel">Join the community</h5> <button type="button" class="close btn-close" data-dismiss="modal" data-bs-dismiss="modal" aria-label="Close"> <span aria-hidden="true">×</span> </button> </div> <div class="login-modal-message"> You need to <a href="/accounts/login?next=/search">log in</a> to edit.<br/> You can <a href="/accounts/register?next=/search">create a new account</a> if you don't have one.<br/><br/> </div> </div> </div> </div> <div class="container content content-buffer "> <div class="author-search-page"> <div class="title home-page-header"> <div class="row"> <div class="col-lg-6"> <h2 class="home-page-title"> Search Results for author: <span class="author-name">Yong Xu</span> </h2> <h3 class="home-page-subtitle">Found <b>166</b> papers, <b>81</b> papers with code</h3> </div> <div class="col-lg-6 index-group"> <div id="authorSortBy" style="float: right;" class="btn-group pull-right search-page-order-by" role="group"> <div class="btn-group dropdown" role="group"> <button id="btnGroupDrop1" type="button" class="btn btn-outline-secondary dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false"> Date Published <span class=" icon-wrapper icon-fa icon-fa-regular" data-name="chevron-down"><svg viewBox="0 0 448 513.795" xmlns="http://www.w3.org/2000/svg"><path d="M441.9 168.28c4.7 4.7 4.7 12.3 0 17l-209.4 209.4c-4.7 4.7-12.3 4.7-17 0L6.1 185.28c-4.7-4.7-4.7-12.3 0-17l19.8-19.8c4.7-4.7 12.3-4.7 17 0L224 329.18l181.1-180.7c4.701-4.7 12.302-4.7 17 0z"/></svg></span> </button> <div class="dropdown-menu" aria-labelledby="btnGroupDrop1"> <a class="dropdown-item" href="?q=author%3AYong+Xu&order_by=date">Date Published</a> <a class="dropdown-item" href="?q=author%3AYong+Xu&order_by=stars">Github Stars</a> </div> </div> <a href="?q=author%3AYong+Xu&order=asc" type="button" class="btn btn-outline-secondary"> <span class=" icon-wrapper icon-fa icon-fa-regular" data-name="arrow-down"><svg viewBox="0 0 448 513.795" xmlns="http://www.w3.org/2000/svg"><path d="M441.9 251.08c4.7 4.7 4.7 12.3 0 17l-209.4 209.4c-4.7 4.7-12.3 4.7-17 0L6.1 268.08c-4.7-4.7-4.7-12.3 0-17l19.8-19.8c4.7-4.7 12.3-4.7 17 0L198 386.38V44.98c0-6.599 5.401-12 12-12h28c6.6 0 12 5.401 12 12v341.4l155.1-155.1c4.701-4.7 12.302-4.7 17 0z"/></svg></span> </a> </div> </div> </div> </div> <div class="infinite-container text-center"> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/revisiting-classification-taxonomy-for"> <div class="item-image" style="background-image: 

Revisiting Classification Taxonomy for Grammatical Errors
no code implementations • 17 Feb 2025 • Deqing Zou, Jingheng Ye, Yulu Liu, Yu Wu, Zishan Xu, Yinghui Li, Hai-Tao Zheng, Bingxu An, Zhao Wei, Yong Xu
Grammatical error classification plays a crucial role in language learning systems, but existing classification taxonomies often lack rigorous validation, leading to inconsistencies and unreliable feedback.
item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/revisiting-classification-taxonomy-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/revisiting-classification-taxonomy-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/combining-knowledge-graph-and-llms-for"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9r
FMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/combining-knowledge-graph-and-llms-for">Combining Knowledge 
Graph and LLMs for Enhanced Zero-shot Visual Question Answering</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/combining-knowledge-graph-and-llms-for#code">no code implementations</a> • <span class="author-name-text item-date-pub">22 Jan 2025</span> • <span class="author-span "> <a href="/author/qian-tao">Qian Tao</a></span>, <span class="author-span "> <a href="/author/xiaoyang-fan">Xiaoyang Fan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/xingquan-zhu">Xingquan Zhu</a></span>, <span class="author-span "> <a href="/author/yufei-tang">Yufei Tang</a></span> </p> <p class="item-strip-abstract">Meanwhile, the knowledge graph is used to expand and connect users' queries to the image content for better visual question answering.</p> <div class="sota"> </div> <p> <a href="/task/knowledge-graphs"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Knowledge Graphs</span> </span> </a> <a href="/task/question-answering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/56ae901a-265f-415f-b175-ce54133d648b.jpg"> <span>Question Answering</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/combining-knowledge-graph-and-llms-for#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/combining-knowledge-graph-and-llms-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/combining-knowledge-graph-and-llms-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2481188 --> <div class="col-lg-3 item-image-col"> <a href="/paper/heterogeneous-graph-collaborative-filtering-2"> <div class="item-image" style="background-image: 

MixRec: Heterogeneous Graph Collaborative Filtering
1 code implementation • 18 Dec 2024 • Lianghao Xia, Meiyan Xie, Yong Xu, Chao Huang • 17 GitHub stars
For modern recommender systems, the use of low-dimensional latent representations to embed users and items based on their observed interactions has become commonplace.
Tasks: Collaborative Filtering, Contrastive Learning, +3

Privacy-Preserving Federated Foundation Model for Generalist Ultrasound Artificial Intelligence
no code implementations • 25 Nov 2024 • Yuncheng Jiang, Chun-Mei Feng, Jinke Ren, Jun Wei, Zixun Zhang, Yiwen Hu, Yunbi Liu, Rui Sun, Xuemei Tang, Juan Du, Xiang Wan, Yong Xu, Bo Du, Xin Gao, Guangyu Wang, Shaohua Zhou, Shuguang Cui, Rick Siow Mong Goh, Yong Liu, Zhen Li
Notably, UltraFedFM surpasses the diagnostic accuracy of mid-level ultrasonographers and matches the performance of expert-level sonographers in the joint diagnosis of 8 common systemic diseases.
Tasks: Federated Learning, Lesion Segmentation, +1

Multi-path Exploration and Feedback Adjustment for Text-to-Image Person Retrieval
no code implementations • 26 Oct 2024 • Bin Kang, Bin Chen, Junjie Wang, Yong Xu
Text-based person retrieval aims to identify the specific persons using textual descriptions as queries.
Tasks: cross-modal alignment, Person Retrieval, +3

/paper/mambasci-efficient-mamba-unet-for-quad-bayer (listing truncated)
kJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mambasci-efficient-mamba-unet-for-quad-bayer">MambaSCI: Efficient Mamba-UNet 
for Quad-Bayer Patterned Video Snapshot Compressive Imaging</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mambasci-efficient-mamba-unet-for-quad-bayer#code">no code implementations</a> • <span class="author-name-text item-date-pub">18 Oct 2024</span> • <span class="author-span "> <a href="/author/zhenghao-pan">Zhenghao Pan</a></span>, <span class="author-span "> <a href="/author/haijin-zeng">Haijin Zeng</a></span>, <span class="author-span "> <a href="/author/jiezhang-cao">JieZhang Cao</a></span>, <span class="author-span "> <a href="/author/yongyong-chen">Yongyong Chen</a></span>, <span class="author-span "> <a href="/author/kai-zhang">Kai Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">To address this challenge, we propose the MambaSCI method, which leverages the Mamba and UNet architectures for efficient reconstruction of quad-Bayer patterned color video SCI.</p> <div class="sota"> </div> <p> <a href="/task/demosaicking"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Demosaicking</span> </span> </a> <a href="/task/mamba"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Mamba</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mambasci-efficient-mamba-unet-for-quad-bayer" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mambasci-efficient-mamba-unet-for-quad-bayer#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/restorative-speech-enhancement-a-progressive"> <div class="item-image" style="background-image: 
Restorative Speech Enhancement: A Progressive Approach Using SE and Codec Modules
no code implementations • 2 Oct 2024 • Hsin-Tien Chiang, Hao Zhang, Yong Xu, Meng Yu, Dong Yu
In challenging environments with significant noise and reverberation, traditional speech enhancement (SE) methods often lead to over-suppressed speech, creating audible artifacts and harming downstream task performance.
Tasks: Quantization, Speech Enhancement
Paper: /paper/restorative-speech-enhancement-a-progressive

EzAudio: Enhancing Text-to-Audio Generation with Efficient Diffusion Transformer
no code implementations • 17 Sep 2024 • Jiarui Hai, Yong Xu, Hao Zhang, Chenxing Li, Helin Wang, Mounya Elhilali, Dong Yu
Latent diffusion models have shown promising results in text-to-audio (T2A) generation tasks, yet previous models have encountered difficulties in generation quality, computational cost, diffusion sampling, and data preparation.
Tasks: Audio Generation
Paper: /paper/ezaudio-enhancing-text-to-audio-generation
LibriheavyMix: A 20,000-Hour Dataset for Single-Channel Reverberant Multi-Talker Speech Separation, ASR and Speaker Diarization
no code implementations • 1 Sep 2024 • Zengrui Jin, Yifan Yang, Mohan Shi, Wei Kang, Xiaoyu Yang, Zengwei Yao, Fangjun Kuang, Liyong Guo, Lingwei Meng, Long Lin, Yong Xu, Shi-Xiong Zhang, Daniel Povey
This paper presents a large-scale far-field overlapping speech dataset, crafted to advance research in speech separation, recognition, and speaker diarization.
Tasks: Speaker Diarization, +1 more
Paper: /paper/libriheavymix-a-20000-hour-dataset-for-single
Advancing Multi-talker ASR Performance with Large Language Models
no code implementations • 30 Aug 2024 • Mohan Shi, Zengrui Jin, Yaoxun Xu, Yong Xu, Shi-Xiong Zhang, Kun Wei, Yiwen Shao, Chunlei Zhang, Dong Yu
Recognizing overlapping speech from multiple speakers in conversational scenarios is one of the most challenging problems for automatic speech recognition (ASR).
Tasks: Automatic Speech Recognition (ASR), +2 more
Paper: /paper/advancing-multi-talker-asr-performance-with
Deep Code Search with Naming-Agnostic Contrastive Multi-View Learning
no code implementations • 18 Aug 2024 • Jiadong Feng, Wei Li, Zhao Wei, Yong Xu, Juhong Wang, Hui Li
However, developers may not follow the same naming conventions, and the same variable may have different names in different implementations, which poses a challenge to deep-learning-based code search methods that rely on explicit variable correspondences to understand source code.
Tasks: Code Search, Contrastive Learning, +3 more
Paper: /paper/deep-code-search-with-naming-agnostic

OpenCity: Open Spatio-Temporal Foundation Models for Traffic Prediction
1 code implementation • 16 Aug 2024 • Zhonghang Li, Long Xia, Lei Shi, Yong Xu, Dawei Yin, Chao Huang
Accurate traffic forecasting is crucial for effective urban planning and transportation management, enabling efficient resource allocation and enhanced travel experiences.
Tasks: Prediction, Traffic Prediction, +1 more
Paper: /paper/opencity-open-spatio-temporal-foundation • Code: available on GitHub (94 stars)

[Next entry truncated in source: /paper/self-supervised-3d-point-cloud-completion-via]
url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+k
SZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/self-supervised-3d-point-cloud-completion-via">Self-supervised 3D Point Cloud Completion via Multi-view Adversarial Learning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/self-supervised-3d-point-cloud-completion-via#code">1 code implementation</a> • <span class="author-name-text item-date-pub">13 Jul 2024</span> • <span class="author-span "> <a href="/author/lintai-wu">Lintai Wu</a></span>, <span class="author-span "> <a href="/author/xianjing-cheng">Xianjing Cheng</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/huanqiang-zeng">Huanqiang Zeng</a></span>, <span class="author-span "> <a href="/author/junhui-hou">Junhui Hou</a></span> </p> <p class="item-strip-abstract">Additionally, we render the reconstructed complete shape into multi-view depth maps and design an adversarial learning module to learn the geometry of the target shape from category-specific single-view depth images.</p> <div class="sota"> </div> <p> <a href="/task/point-cloud-completion"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Point Cloud Completion</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" 
data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 3</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/self-supervised-3d-point-cloud-completion-via" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/self-supervised-3d-point-cloud-completion-via#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/text-queried-target-sound-event-localization"> <div class="item-image" style="background-image: 
Text-Queried Target Sound Event Localization
no code implementations • 23 Jun 2024 • Jinzheng Zhao, Xinyuan Qian, Yong Xu, Haohe Liu, Yin Cao, Davide Berghi, Wenwu Wang
Sound event localization and detection (SELD) aims to determine the appearance of sound classes, together with their Direction of Arrival (DOA).
Tasks: Room Impulse Response (RIR), Sound Event Localization and Detection, +1 more

Multi-Channel Multi-Speaker ASR Using Target Speaker's Solo Segment
no code implementations • 13 Jun 2024 • Yiwen Shao, Shi-Xiong Zhang, Yong Xu, Meng Yu, Dong Yu, Daniel Povey, Sanjeev Khudanpur
In the field of multi-channel, multi-speaker Automatic Speech Recognition (ASR), the task of discerning and accurately transcribing a target speaker's speech within background noise remains a formidable challenge.
Tasks: Automatic Speech Recognition, Automatic Speech Recognition (ASR), +1 more
FlashST: A Simple and Universal Prompt-Tuning Framework for Traffic Prediction
1 code implementation (65 stars) • 28 May 2024 • Zhonghang Li, Lianghao Xia, Yong Xu, Chao Huang
Additionally, we incorporate a distribution mapping mechanism to align the data distributions of pre-training and downstream data, facilitating effective knowledge transfer in spatio-temporal forecasting.
Tasks: In-Context Learning, Prediction, +3 more

OV-DQUO: Open-Vocabulary DETR with Denoising Text Query Training and Open-World Unknown Objects Supervision
1 code implementation (14 stars) • 28 May 2024 • Junjie Wang, Bin Chen, Bin Kang, Yulin Li, YiChi Chen, Weizhi Xian, Huifeng Chang, Yong Xu
However, existing open-vocabulary detectors trained on base category data tend to assign higher confidence to trained categories and confuse novel categories with the background.
Ranked #3 on Open Vocabulary Object Detection on LVIS v1.0
Tasks: Contrastive Learning, Denoising, +3 more

FPDIoU Loss: A Loss Function for Efficient Bounding Box Regression of Rotated Object Detection
no code implementations • 16 May 2024 • Siliang Ma, Yong Xu
In order to improve the efficiency and accuracy of bounding box regression for rotated object detection, we proposed a novel metric for arbitrary shapes comparison based on minimum points distance, which takes most of the factors from existing loss functions for rotated object detection into account, i.e., the overlap or nonoverlapping area, the central points distance and the rotation angle.
Tasks: Object, object-detection, +4 more
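The FPDIoU abstract above only names the ingredients of the metric (a minimum points distance that reflects overlap, centre distance, and rotation angle), so the following is a hypothetical Python sketch of a corner-point-distance penalty in that spirit, not the paper's actual FPDIoU formula; the corner parameterization and the normalization by the squared image diagonal are assumptions made for illustration:

import numpy as np

def rotated_box_corners(cx, cy, w, h, angle):
    """Return the four corner points of a rotated box (angle in radians)."""
    c, s = np.cos(angle), np.sin(angle)
    rot = np.array([[c, -s], [s, c]])
    offsets = np.array([[-w, -h], [w, -h], [w, h], [-w, h]]) / 2.0
    return offsets @ rot.T + np.array([cx, cy])

def corner_distance_penalty(box_pred, box_gt, img_w, img_h):
    """Mean squared distance between corresponding corners, normalized by
    the squared image diagonal; the penalty grows with centre offset,
    size mismatch, and rotation difference alike."""
    p = rotated_box_corners(*box_pred)
    g = rotated_box_corners(*box_gt)
    return np.mean(np.sum((p - g) ** 2, axis=1)) / (img_w ** 2 + img_h ** 2)

# Usage: boxes are (cx, cy, w, h, angle); a perfect match gives 0.
penalty = corner_distance_penalty((50, 50, 20, 10, 0.30),
                                  (52, 48, 22, 10, 0.25), 640, 480)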
The RoboDrive Challenge: Drive Anytime Anywhere in Any Condition
no code implementations • 14 May 2024 • Lingdong Kong, Shaoyuan Xie, Hanjiang Hu, Yaru Niu, Wei Tsang Ooi, Benoit R. Cottereau, Lai Xing Ng, Yuexin Ma, Wenwei Zhang, Liang Pan, Kai Chen, Ziwei Liu, Weichao Qiu, Wei zhang, Xu Cao, Hao Lu, Ying-Cong Chen, Caixin Kang, Xinning Zhou, Chengyang Ying, Wentao Shang, Xingxing Wei, Yinpeng Dong, Bo Yang, Shengyin Jiang, Zeliang Ma, Dengyi Ji, Haiwen Li, Xingliang Huang, Yu Tian, Genghua Kou, Fan Jia, Yingfei Liu, Tiancai Wang, Ying Li, Xiaoshuai Hao, Yifan Yang, HUI ZHANG, Mengchuan Wei, Yi Zhou, Haimei Zhao, Jing Zhang, Jinke Li, Xiao He, Xiaoqiang Cheng, Bingyang Zhang, Lirong Zhao, Dianlei Ding, Fangsheng Liu, Yixiang Yan, Hongming Wang, Nanfei Ye, Lun Luo, Yubo Tian, Yiwei Zuo, Zhe Cao, Yi Ren, Yunfan Li, Wenjie Liu, Xun Wu, Yifan Mao, Ming Li, Jian Liu, Jiayang Liu, Zihan Qin, Cunxi Chu, Jialei Xu, Wenbo Zhao, Junjun Jiang, Xianming Liu, Ziyan Wang, Chiwei Li, Shilong Li, Chendong Yuan, Songyue Yang, Wentao Liu, Peng Chen, Bin Zhou, YuBo Wang, Chi Zhang, Jianhang Sun, Hai Chen, Xiao Yang, Lizhong Wang, Dongyi Fu, Yongchun Lin, Huitong Yang, Haoang Li, Yadan Luo, Xianjing Cheng, Yong Xu
In the realm of autonomous driving, robust perception under out-of-distribution conditions is paramount for the safe deployment of vehicles.
Tasks: Autonomous Driving, Data Augmentation, +3 more

Masked Two-channel Decoupling Framework for Incomplete Multi-view Weak Multi-label Learning
no code implementations • NeurIPS 2023 • Chengliang Liu, Jie Wen, Yabo Liu, Chao Huang, Zhihao Wu, Xiaoling Luo, Yong Xu
Multi-view learning has become a popular research topic in recent years, but research on the cross-application of classic multi-label classification and multi-view learning is still in its early stages.
Tasks: Multi-Label Classification, MUlTI-LABEL-ClASSIFICATION, +2 more
src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Multi-Label Classification</span> </span> </a> <a href="/task/multi-label-classification-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>MUlTI-LABEL-ClASSIFICATION</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/masked-two-channel-decoupling-framework-for-1#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/masked-two-channel-decoupling-framework-for-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/masked-two-channel-decoupling-framework-for-1#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/cdimc-net-cognitive-deep-incomplete-multi"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2403.19514.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/cdimc-net-cognitive-deep-incomplete-multi">CDIMC-net: Cognitive Deep Incomplete Multi-view Clustering Network</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/cdimc-net-cognitive-deep-incomplete-multi#code">no code implementations</a> • <span class="author-name-text item-date-pub">28 Mar 2024</span> • <span class="author-span "> <a href="/author/jie-wen">Jie Wen</a></span>, <span class="author-span "> <a href="/author/zheng-zhang">Zheng Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/bob-zhang">Bob Zhang</a></span>, <span class="author-span "> <a href="/author/lunke-fei">Lunke Fei</a></span>, <span class="author-span "> <a href="/author/guo-sen-xie">Guo-Sen Xie</a></span> </p> <p class="item-strip-abstract">In this paper, we propose a novel incomplete multi-view clustering network, called Cognitive Deep Incomplete Multi-view Clustering Network (CDIMC-net), to address these issues.</p> <div class="sota"> </div> <p> <a href="/task/clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001594-3ce5d6d8.jpg"> <span>Clustering</span> </span> </a> <a href="/task/graph-embedding"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/4867272e-8232-4786-b735-41b4dccb5adf.jpg"> <span>Graph Embedding</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/cdimc-net-cognitive-deep-incomplete-multi#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/cdimc-net-cognitive-deep-incomplete-multi" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/cdimc-net-cognitive-deep-incomplete-multi#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2303897 --> <div class="col-lg-3 item-image-col"> <a href="/paper/alphafin-benchmarking-financial-analysis-with"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2403.12582.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/alphafin-benchmarking-financial-analysis-with">AlphaFin: Benchmarking Financial Analysis with Retrieval-Augmented Stock-Chain Framework</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/alphafin-benchmarking-financial-analysis-with#code">1 code implementation</a> • <span class="author-name-text item-date-pub">19 Mar 2024</span> • <span class="author-span "> <a href="/author/xiang-li">Xiang Li</a></span>, <span class="author-span "> <a href="/author/zhenyu-li">Zhenyu Li</a></span>, <span class="author-span "> <a href="/author/chen-shi">Chen Shi</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/qing-du">Qing Du</a></span>, <span class="author-span "> <a href="/author/mingkui-tan">Mingkui Tan</a></span>, <span class="author-span "> <a href="/author/jun-huang">Jun Huang</a></span>, <span class="author-span "> <a href="/author/wei-lin-1">Wei Lin</a></span> </p> <p class="item-strip-abstract">The task of financial analysis primarily encompasses two key areas: stock trend prediction and the corresponding financial question answering.</p> <div class="sota"> </div> <p> <a href="/task/benchmarking"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Benchmarking</span> </span> </a> <a href="/task/financial-analysis"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Financial Analysis</span> </span> </a> <a style="position: relative; top: -2px;" 
href="/paper/alphafin-benchmarking-financial-analysis-with#tasks"> <span class="badge badge-primary"> <b>+4</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 161</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/alphafin-benchmarking-financial-analysis-with" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/alphafin-benchmarking-financial-analysis-with#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2302734 --> <div class="col-lg-3 item-image-col"> <a href="/paper/queryagent-a-reliable-and-efficient-reasoning"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/83171c88-21fe-4101-98e4-67d65f19677a.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/queryagent-a-reliable-and-efficient-reasoning">QueryAgent: A Reliable and Efficient Reasoning Framework with Environmental Feedback-based Self-Correction</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/queryagent-a-reliable-and-efficient-reasoning#code">1 code implementation</a> • <span class="author-name-text item-date-pub">18 Mar 2024</span> • <span class="author-span "> <a href="/author/xiang-huang">Xiang Huang</a></span>, <span class="author-span "> <a href="/author/sitao-cheng">Sitao Cheng</a></span>, <span class="author-span "> <a href="/author/shanshan-huang">Shanshan Huang</a></span>, <span class="author-span "> <a href="/author/jiayu-shen">Jiayu Shen</a></span>, <span class="author-span 
author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/chaoyun-zhang">Chaoyun Zhang</a></span>, <span class="author-span "> <a href="/author/yuzhong-qu">Yuzhong Qu</a></span> </p> <p class="item-strip-abstract">Employing Large Language Models (LLMs) for semantic parsing has achieved remarkable success.</p> <div class="sota"> </div> <p> <a href="/task/semantic-parsing"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Semantic Parsing</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 21</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/queryagent-a-reliable-and-efficient-reasoning" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/queryagent-a-reliable-and-efficient-reasoning#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/call-me-when-necessary-llms-can-efficiently"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2403.08593.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/call-me-when-necessary-llms-can-efficiently">Call Me When Necessary: LLMs can Efficiently and Faithfully Reason over Structured Environments</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/call-me-when-necessary-llms-can-efficiently#code">no code implementations</a> • <span class="author-name-text 
item-date-pub">13 Mar 2024</span> • <span class="author-span "> <a href="/author/sitao-cheng">Sitao Cheng</a></span>, <span class="author-span "> <a href="/author/ziyuan-zhuang">Ziyuan Zhuang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/fangkai-yang">Fangkai Yang</a></span>, <span class="author-span "> <a href="/author/chaoyun-zhang">Chaoyun Zhang</a></span>, <span class="author-span "> <a href="/author/xiaoting-qin">Xiaoting Qin</a></span>, <span class="author-span "> <a href="/author/xiang-huang">Xiang Huang</a></span>, <span class="author-span "> <a href="/author/ling-chen">Ling Chen</a></span>, <span class="author-span "> <a href="/author/qingwei-lin">QIngwei Lin</a></span>, <span class="author-span "> <a href="/author/dongmei-zhang">Dongmei Zhang</a></span>, <span class="author-span "> <a href="/author/saravan-rajmohan">Saravan Rajmohan</a></span>, <span class="author-span "> <a href="/author/qi-zhang">Qi Zhang</a></span> </p> <p class="item-strip-abstract">We propose Reasoning-Path-Editing (Readi), a novel framework where LLMs can efficiently and faithfully reason over structured environments.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/call-me-when-necessary-llms-can-efficiently" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/call-me-when-necessary-llms-can-efficiently#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2391650 --> <div class="col-lg-3 item-image-col"> <a href="/paper/tracking-meets-lora-faster-training-larger"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2403.05231.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/tracking-meets-lora-faster-training-larger">Tracking Meets LoRA: Faster Training, Larger Model, Stronger Performance</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/tracking-meets-lora-faster-training-larger#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Mar 2024</span> • <span class="author-span "> <a href="/author/liting-lin">Liting Lin</a></span>, <span class="author-span "> <a href="/author/heng-fan">Heng Fan</a></span>, <span class="author-span "> <a href="/author/zhipeng-zhang">Zhipeng Zhang</a></span>, <span class="author-span "> <a href="/author/yaowei-wang">YaoWei Wang</a></span>, <span 
class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/haibin-ling">Haibin Ling</a></span> </p> <p class="item-strip-abstract">The essence of our work lies in adapting LoRA, a technique that fine-tunes a small subset of model parameters without adding inference latency, to the domain of visual tracking.</p> <div class="sota"> <p> <a href="/sota/visual-object-tracking-on-uav123"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/visual-object-tracking-on-uav123-small_62ce3fed.png"/> </a> Ranked #1 on <a class="sota-task" href="/sota/visual-object-tracking-on-uav123"> Visual Object Tracking on UAV123 </a> </p> </div> <p> <a href="/task/parameter-efficient-fine-tuning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>parameter-efficient fine-tuning</span> </span> </a> <a href="/task/visual-object-tracking"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Visual Object Tracking</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/tracking-meets-lora-faster-training-larger#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 60</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/tracking-meets-lora-faster-training-larger" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/tracking-meets-lora-faster-training-larger#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> 
</div> </div> <div class="row infinite-item item paper-card"> <!-- 2297430 --> <div class="col-lg-3 item-image-col"> <a href="/paper/urbangpt-spatio-temporal-large-language"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/c3bee5aa-02a3-4414-9414-783ed32e02ed.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/urbangpt-spatio-temporal-large-language">UrbanGPT: Spatio-Temporal Large Language Models</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/urbangpt-spatio-temporal-large-language#code">1 code implementation</a> • <span class="author-name-text item-date-pub">25 Feb 2024</span> • <span class="author-span "> <a href="/author/zhonghang-li">Zhonghang Li</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/jiabin-tang">Jiabin Tang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/lei-shi">Lei Shi</a></span>, <span class="author-span "> <a href="/author/long-xia">Long Xia</a></span>, <span class="author-span "> <a href="/author/dawei-yin">Dawei Yin</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span> </p> <p class="item-strip-abstract">These findings highlight the potential of building large language models for spatio-temporal learning, particularly in zero-shot scenarios where labeled data is scarce.</p> <div class="sota"> </div> <p> <a href="/task/10-shot-image-generation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>10-shot image generation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 328</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/urbangpt-spatio-temporal-large-language" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/urbangpt-spatio-temporal-large-language#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 
7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/unsupervised-sign-language-translation-and"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2402.07726.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/unsupervised-sign-language-translation-and">Unsupervised Sign Language Translation and Generation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/unsupervised-sign-language-translation-and#code">no code implementations</a> • <span class="author-name-text item-date-pub">12 Feb 2024</span> • <span class="author-span "> <a href="/author/zhengsheng-guo">Zhengsheng Guo</a></span>, <span class="author-span "> <a href="/author/zhiwei-he">Zhiwei He</a></span>, <span class="author-span "> <a href="/author/wenxiang-jiao">Wenxiang Jiao</a></span>, <span class="author-span "> <a href="/author/xing-wang">Xing Wang</a></span>, <span class="author-span "> <a href="/author/rui-wang">Rui Wang</a></span>, <span class="author-span "> <a href="/author/kehai-chen">Kehai Chen</a></span>, <span class="author-span "> <a href="/author/zhaopeng-tu">Zhaopeng Tu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/min-zhang">Min Zhang</a></span> </p> <p class="item-strip-abstract">Motivated by the success of unsupervised neural machine translation (UNMT), we introduce an unsupervised sign language translation and generation network (USLNet), which learns from abundant single-modality (text and video) data without parallel sign language data.</p> <div class="sota"> </div> <p> <a href="/task/machine-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000257-2b560008_M7RFnV9.jpg"> <span>Machine Translation</span> </span> </a> <a href="/task/sign-language-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001449-f3f0d984.jpg"> <span>Sign Language Translation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/unsupervised-sign-language-translation-and#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/unsupervised-sign-language-translation-and" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 
64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/unsupervised-sign-language-translation-and#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2269517 --> <div class="col-lg-3 item-image-col"> <a href="/paper/3d-shape-completion-on-unseen-categories-a"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/0c1ad5f7-a527-42d3-9292-fa3c8ba10705.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/3d-shape-completion-on-unseen-categories-a">3D Shape Completion on Unseen Categories:A Weakly-supervised Approach</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/3d-shape-completion-on-unseen-categories-a#code">1 code implementation</a> • <span class="author-name-text item-date-pub">19 Jan 2024</span> • <span class="author-span "> <a href="/author/lintai-wu">Lintai Wu</a></span>, <span class="author-span "> <a href="/author/junhui-hou">Junhui Hou</a></span>, <span class="author-span "> <a href="/author/linqi-song">Linqi Song</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">Specifically, we construct a prior bank consisting of representative shapes from the seen categories.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 9</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/3d-shape-completion-on-unseen-categories-a" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/3d-shape-completion-on-unseen-categories-a#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 
39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/text-conditional-attribute-alignment-across"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1460133.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/text-conditional-attribute-alignment-across">Text-conditional Attribute Alignment across Latent Spaces for 3D Controllable Face Image Synthesis</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/text-conditional-attribute-alignment-across#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2024-1"> CVPR 2024 </a> </span> • <span class="author-span "> <a href="/author/feifan-xu">Feifan Xu</a></span>, <span class="author-span "> <a href="/author/rui-li">Rui Li</a></span>, <span class="author-span "> <a href="/author/si-wu">Si Wu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/hau-san-wong-2">Hau San Wong</a></span> </p> <p class="item-strip-abstract">To address these issues we propose a Text-conditional Attribute aLignment approach for 3D controllable face image synthesis and our model is referred to as TcALign.</p> <div class="sota"> </div> <p> <a href="/task/attribute"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Attribute</span> </span> </a> <a href="/task/image-generation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/5ac09bd9-8785-4253-8cf4-4412dcd36426.jpg"> <span>Image Generation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/text-conditional-attribute-alignment-across" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/text-conditional-attribute-alignment-across#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code 
</a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/vretoucher-learning-cross-frame-feature"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1460291.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/vretoucher-learning-cross-frame-feature">VRetouchEr: Learning Cross-frame Feature Interdependence with Imperfection Flow for Face Retouching in Videos</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/vretoucher-learning-cross-frame-feature#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2024-1"> CVPR 2024 </a> </span> • <span class="author-span "> <a href="/author/wen-xue">Wen Xue</a></span>, <span class="author-span "> <a href="/author/le-jiang">Le Jiang</a></span>, <span class="author-span "> <a href="/author/lianxin-xie">Lianxin Xie</a></span>, <span class="author-span "> <a href="/author/si-wu">Si Wu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/hau-san-wong-2">Hau San Wong</a></span> </p> <p class="item-strip-abstract">Face Video Retouching is a complex task that often requires labor-intensive manual editing.</p> <div class="sota"> </div> <p> <a href="/task/image-retouching"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Retouching</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/vretoucher-learning-cross-frame-feature" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/vretoucher-learning-cross-frame-feature#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2238999 --> <div class="col-lg-3 item-image-col"> <a href="/paper/taskweaver-a-code-first-agent-framework"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/449a3bc8-f216-429f-b6cd-b76544d97166.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/taskweaver-a-code-first-agent-framework">TaskWeaver: A Code-First Agent Framework</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/taskweaver-a-code-first-agent-framework#code">1 code 
implementation</a> • <span class="author-name-text item-date-pub">29 Nov 2023</span> • <span class="author-span "> <a href="/author/bo-qiao">Bo Qiao</a></span>, <span class="author-span "> <a href="/author/liqun-li">Liqun Li</a></span>, <span class="author-span "> <a href="/author/xu-zhang">Xu Zhang</a></span>, <span class="author-span "> <a href="/author/shilin-he">Shilin He</a></span>, <span class="author-span "> <a href="/author/yu-kang">Yu Kang</a></span>, <span class="author-span "> <a href="/author/chaoyun-zhang">Chaoyun Zhang</a></span>, <span class="author-span "> <a href="/author/fangkai-yang">Fangkai Yang</a></span>, <span class="author-span "> <a href="/author/hang-dong">Hang Dong</a></span>, <span class="author-span "> <a href="/author/jue-zhang">Jue Zhang</a></span>, <span class="author-span "> <a href="/author/lu-wang">Lu Wang</a></span>, <span class="author-span "> <a href="/author/minghua-ma">Minghua Ma</a></span>, <span class="author-span "> <a href="/author/pu-zhao">Pu Zhao</a></span>, <span class="author-span "> <a href="/author/si-qin">Si Qin</a></span>, <span class="author-span "> <a href="/author/xiaoting-qin">Xiaoting Qin</a></span>, <span class="author-span "> <a href="/author/chao-du">Chao Du</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/qingwei-lin">QIngwei Lin</a></span>, <span class="author-span "> <a href="/author/saravan-rajmohan">Saravan Rajmohan</a></span>, <span class="author-span "> <a href="/author/dongmei-zhang">Dongmei Zhang</a></span> </p> <p class="item-strip-abstract">TaskWeaver provides support for rich data structures, flexible plugin usage, and dynamic plugin selection, and leverages LLM coding capabilities for complex logic.</p> <div class="sota"> </div> <p> <a href="/task/natural-language-understanding"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Natural Language Understanding</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 5,524</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/taskweaver-a-code-first-agent-framework" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/taskweaver-a-code-first-agent-framework#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 
1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2239622 --> <div class="col-lg-3 item-image-col"> <a href="/paper/gpt-st-generative-pre-training-of-spatio"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/ad85562b-4ccc-4966-be24-8ab7f3a2b5da.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/gpt-st-generative-pre-training-of-spatio">GPT-ST: Generative Pre-Training of Spatio-Temporal Graph Neural Networks</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/gpt-st-generative-pre-training-of-spatio#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/neurips-2023-11"> NeurIPS 2023 </a> </span> • <span class="author-span "> <a href="/author/zhonghang-li">Zhonghang Li</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span> </p> <p class="item-strip-abstract">This strategy guides the mask autoencoder in learning robust spatio-temporal representations and facilitates the modeling of different relationships, ranging from intra-cluster to inter-cluster, in an easy-to-hard training manner.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 78</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/gpt-st-generative-pre-training-of-spatio" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/gpt-st-generative-pre-training-of-spatio#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 
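As a rough illustration of the masked-autoencoder pre-training idea mentioned in the GPT-ST summary, the sketch below masks part of a spatio-temporal tensor and trains an encoder–decoder to reconstruct the masked entries, raising the masking ratio over epochs as a stand-in for an easy-to-hard schedule. All names, shapes, the stand-in networks, and the schedule itself are assumptions, not the paper's architecture.

```python
import torch
import torch.nn as nn

def masked_reconstruction_step(x, encoder, decoder, mask_ratio):
    """One generic masked-autoencoder step on a (batch, time, nodes, feat) tensor.

    Masks a fraction of (time, node) positions, encodes the corrupted tensor,
    and scores reconstruction only on the masked entries. Illustrative only.
    """
    mask = torch.rand(x.shape[:-1], device=x.device) < mask_ratio      # (B, T, N) bool
    x_corrupt = x.masked_fill(mask.unsqueeze(-1), 0.0)                 # zero out masked entries
    recon = decoder(encoder(x_corrupt))                                # reconstruct full tensor
    return ((recon - x) ** 2)[mask.unsqueeze(-1).expand_as(x)].mean()  # loss on masked entries only

# Stand-in encoder/decoder (assumptions): per-entry MLPs over the feature dimension.
feat, hidden = 2, 32
encoder = nn.Sequential(nn.Linear(feat, hidden), nn.ReLU())
decoder = nn.Linear(hidden, feat)

x = torch.randn(8, 12, 20, feat)     # (batch, time steps, nodes, features)
for epoch in range(5):               # easy-to-hard: gradually increase the masking ratio
    ratio = 0.25 + 0.10 * epoch
    loss = masked_reconstruction_step(x, encoder, decoder, ratio)
```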
Everything of Thoughts: Defying the Law of Penrose Triangle for Thought Generation
1 code implementation (139 GitHub stars) • 7 Nov 2023 • Ruomeng Ding, Chaoyun Zhang, Lu Wang, Yong Xu, Minghua Ma, Wei Zhang, Si Qin, Saravan Rajmohan, Qingwei Lin, Dongmei Zhang
To address these limitations, we introduce a novel thought prompting approach called "Everything of Thoughts" (XoT) to defy the law of the "Penrose triangle" of existing thought paradigms.
Tasks: Decision Making
badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/everything-of-thoughts-defying-the-law-of#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/usee-unified-speech-enhancement-and-editing"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2310.00900.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/usee-unified-speech-enhancement-and-editing">uSee: Unified Speech Enhancement and Editing with Conditional Diffusion Models</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/usee-unified-speech-enhancement-and-editing#code">no code implementations</a> • <span class="author-name-text item-date-pub">2 Oct 2023</span> • <span class="author-span "> <a href="/author/muqiao-yang">Muqiao Yang</a></span>, <span class="author-span "> <a href="/author/chunlei-zhang">Chunlei Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/zhongweiyang-xu">Zhongweiyang Xu</a></span>, <span class="author-span "> <a href="/author/heming-wang">Heming Wang</a></span>, <span class="author-span "> <a href="/author/bhiksha-raj">Bhiksha Raj</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Speech enhancement aims to improve the quality of speech signals in terms of quality and intelligibility, and speech editing refers to the process of editing the speech according to specific user needs.</p> <div class="sota"> </div> <p> <a href="/task/denoising"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/6c4d53f8-9c6d-47c8-80c7-1b8e1c0a7d42.jpg"> <span>Denoising</span> </span> </a> <a href="/task/self-supervised-learning"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001882-b4b42454.jpg"> <span>Self-Supervised Learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/usee-unified-speech-enhancement-and-editing#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/usee-unified-speech-enhancement-and-editing" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/usee-unified-speech-enhancement-and-editing#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/cross-modal-vertical-federated-learning-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2306.02673.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/cross-modal-vertical-federated-learning-for">Cross-Modal Vertical Federated Learning for MRI Reconstruction</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/cross-modal-vertical-federated-learning-for#code">no code implementations</a> • <span class="author-name-text item-date-pub">5 Jun 2023</span> • <span class="author-span "> <a href="/author/yunlu-yan">Yunlu Yan</a></span>, <span class="author-span "> <a href="/author/hong-wang">Hong Wang</a></span>, <span class="author-span "> <a href="/author/yawen-huang">Yawen Huang</a></span>, <span class="author-span "> <a href="/author/nanjun-he">Nanjun He</a></span>, <span class="author-span "> <a href="/author/lei-zhu">Lei Zhu</a></span>, <span class="author-span "> <a href="/author/yuexiang-li">Yuexiang Li</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/yefeng-zheng">Yefeng Zheng</a></span> </p> <p class="item-strip-abstract">To this end, we formulate this practical-yet-challenging cross-modal vertical federated learning task, in which shape data from multiple hospitals have different modalities with a small amount of multi-modality data collected from the same individuals.</p> <div class="sota"> </div> <p> <a href="/task/disentanglement"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Disentanglement</span> </span> </a> <a href="/task/mri-reconstruction"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/task-0000002391-f435def5.jpg"> <span>MRI Reconstruction</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/cross-modal-vertical-federated-learning-for#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/cross-modal-vertical-federated-learning-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/cross-modal-vertical-federated-learning-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2147945 --> <div class="col-lg-3 item-image-col"> <a href="/paper/graph-transformer-for-recommendation"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2306.02330.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/graph-transformer-for-recommendation">Graph Transformer for Recommendation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/graph-transformer-for-recommendation#code">1 code implementation</a> • <span class="author-name-text item-date-pub">4 Jun 2023</span> • <span class="author-span "> <a href="/author/chaoliu-li">Chaoliu Li</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/xubin-ren">Xubin Ren</a></span>, <span class="author-span "> <a href="/author/yaowen-ye">Yaowen Ye</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span> </p> <p class="item-strip-abstract">This paper presents a novel approach to representation learning in recommender systems by integrating generative self-supervised learning with graph transformer architecture.</p> <div class="sota"> </div> <p> <a href="/task/collaborative-filtering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000595-96a2d3eb.jpg"> <span>Collaborative Filtering</span> </span> </a> <a href="/task/data-augmentation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001560-029cbc00.jpg"> <span>Data Augmentation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/graph-transformer-for-recommendation#tasks"> <span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> 
<div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 72</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/graph-transformer-for-recommendation" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/graph-transformer-for-recommendation#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/design-and-implementation-of-emergency"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2305.08542.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/design-and-implementation-of-emergency">Design and Implementation of Emergency Simulated Lighting System Based on Tello UAV</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/design-and-implementation-of-emergency#code">no code implementations</a> • <span class="author-name-text item-date-pub">15 May 2023</span> • <span class="author-span "> <a href="/author/yexin-pan">Yexin Pan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/bo-ma">Bo Ma</a></span>, <span class="author-span "> <a href="/author/chuanhuang-li">Chuanhuang Li</a></span> </p> <p class="item-strip-abstract">Third, the flight control module has designed a specialized command control framework based on Tello UAV's API, which converts the planned flight path into command statements, forms flight text, and controls the flight of unmanned aerial vehicles 
accordingly.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/design-and-implementation-of-emergency" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/design-and-implementation-of-emergency#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2130555 --> <div class="col-lg-3 item-image-col"> <a href="/paper/lmeye-an-interactive-perception-network-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/8287c9ec-d2c8-4c15-a452-dd7519a34360.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/lmeye-an-interactive-perception-network-for">LMEye: An Interactive Perception Network for Large Language Models</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/lmeye-an-interactive-perception-network-for#code">1 code implementation</a> • <span class="author-name-text item-date-pub">5 May 2023</span> • <span class="author-span "> <a href="/author/yunxin-li">Yunxin Li</a></span>, <span class="author-span "> <a href="/author/baotian-hu">Baotian Hu</a></span>, <span class="author-span "> <a href="/author/xinyu-chen">Xinyu Chen</a></span>, <span class="author-span "> <a href="/author/lin-ma">Lin Ma</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/min-zhang">Min Zhang</a></span> </p> <p class="item-strip-abstract">LMEye addresses this issue by allowing the LLM to request the desired visual information aligned with various human instructions, which we term as the dynamic visual information interaction.</p> <div class="sota"> </div> <p> <a href="/task/language-modelling"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000267-8df06634.jpg"> <span>Language Modelling</span> </span> </a> <a href="/task/large-language-model"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Large Language Model</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/lmeye-an-interactive-perception-network-for#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg 
LMEye: An Interactive Perception Network for Large Language Models
1 code implementation • 5 May 2023 • Yunxin Li, Baotian Hu, Xinyu Chen, Lin Ma, Yong Xu, Min Zhang
LMEye addresses this issue by allowing the LLM to request the desired visual information aligned with various human instructions, which we term the dynamic visual information interaction.
Tasks: Language Modelling, Large Language Model, +2
★ 48 • Paper • Code

Information Recovery-Driven Deep Incomplete Multiview Clustering Network
2 code implementations • 2 Apr 2023 • Chengliang Liu, Jie Wen, Zhihao Wu, Xiaoling Luo, Chao Huang, Yong Xu
Concretely, a two-stage autoencoder network with a self-attention structure is built to synchronously extract high-level semantic representations of the multiple views and recover the missing data.
Tasks: Clustering, Graph Reconstruction, +3
★ 11 • Paper • Code
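A minimal sketch of the general idea, per-view autoencoders whose codes exchange information through self-attention so observed views can help reconstruct a missing one, is below. The module name, layer sizes, and loss are assumptions for illustration, not the paper's architecture.

```python
# Sketch (assumptions, not the paper's model): one autoencoder per view plus
# self-attention over the stacked view embeddings for cross-view recovery.
import torch
import torch.nn as nn

class MultiViewAE(nn.Module):
    def __init__(self, view_dims, d=128):
        super().__init__()
        self.encoders = nn.ModuleList(
            nn.Sequential(nn.Linear(v, d), nn.ReLU(), nn.Linear(d, d)) for v in view_dims)
        self.attn = nn.MultiheadAttention(embed_dim=d, num_heads=4, batch_first=True)
        self.decoders = nn.ModuleList(
            nn.Sequential(nn.Linear(d, d), nn.ReLU(), nn.Linear(d, v)) for v in view_dims)

    def forward(self, views):                     # views: list of (batch, view_dim) tensors
        z = torch.stack([enc(x) for enc, x in zip(self.encoders, views)], dim=1)  # (B, V, d)
        fused, _ = self.attn(z, z, z)             # every view attends to all views
        recons = [dec(fused[:, i]) for i, dec in enumerate(self.decoders)]
        return fused, recons

model = MultiViewAE([784, 256])
x1, x2 = torch.randn(8, 784), torch.randn(8, 256)
fused, (r1, r2) = model([x1, x2])
loss = nn.functional.mse_loss(r1, x1) + nn.functional.mse_loss(r2, x2)
```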
Reliable Representations Learning for Incomplete Multi-View Partial Multi-Label Classification
no code implementations • 30 Mar 2023 • Chengliang Liu, Jie Wen, Yong Xu, Bob Zhang, Liqiang Nie, Min Zhang
The application of multi-view contrastive learning has further facilitated this process; however, existing multi-view contrastive learning methods crudely select the so-called negative pairs, which often pushes apart samples that belong to the same or similar categories.
Tasks: Classification, Contrastive Learning, +4
Paper • Add Code
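To make the stated failure mode concrete: a generic remedy is to drop same-category pairs from the negative set when (partial) labels are available. The sketch below is an illustration of that idea only, not the method proposed in this paper; the helper name is hypothetical.

```python
# Generic label-aware contrastive negatives: pairs sharing a label are excluded
# from the negative set, so same-category samples are not forced apart.
import torch
import torch.nn.functional as F

def label_aware_info_nce(z1, z2, labels, tau=0.5):
    """z1, z2: (N, d) embeddings of two views; labels: (N, C) multi-hot matrix."""
    z1, z2 = F.normalize(z1, dim=1), F.normalize(z2, dim=1)
    sim = z1 @ z2.t() / tau                              # (N, N) cross-view similarities
    pos = sim.diag()                                     # same instance across views = positive
    share = (labels.float() @ labels.float().t()) > 0    # True where samples share a label
    neg_mask = ~share
    neg_mask.fill_diagonal_(False)                       # the positive is never a negative
    neg_exp = (sim.exp() * neg_mask).sum(dim=1)
    loss = -(pos - torch.log(pos.exp() + neg_exp))       # -log softmax over {pos} ∪ negatives
    return loss.mean()
```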
Deep Double Incomplete Multi-view Multi-label Learning with Incomplete Labels and Missing Views
1 code implementation • IEEE Transactions on Neural Networks and Learning Systems 2023 • Jie Wen, Chengliang Liu, Shijie Deng, Yicheng Liu, Lunke Fei, Ke Yan, Yong Xu
Missing views and missing labels are two challenging problems in real-world applications of multi-view multi-label classification.
Tasks: Classification, Decoder, +3
★ 5 • Paper • Code
DICNet: Deep Instance-Level Contrastive Network for Double Incomplete Multi-View Multi-Label Classification
2 code implementations • 15 Mar 2023 • Chengliang Liu, Jie Wen, Xiaoling Luo, Chao Huang, Zhihao Wu, Yong Xu
To deal with the double incomplete multi-view multi-label classification problem, we propose a deep instance-level contrastive network, namely DICNet.
Tasks: Contrastive Learning, Missing Labels, +1
★ 25 • Paper • Code
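Instance-level cross-view contrastive alignment is a standard building block; a minimal sketch of that generic formulation follows. DICNet's actual objective, including its handling of missing views and labels, differs, so treat this only as background.

```python
# Minimal instance-level cross-view InfoNCE sketch (generic formulation, not DICNet's loss).
import torch
import torch.nn.functional as F

def instance_contrastive(za, zb, tau=0.5):
    """za, zb: (N, d) embeddings of the same N instances under two views."""
    za, zb = F.normalize(za, dim=1), F.normalize(zb, dim=1)
    logits = za @ zb.t() / tau                  # (N, N): diagonal entries are the positives
    targets = torch.arange(len(za))
    # symmetric loss: match view a -> view b and view b -> view a
    return 0.5 * (F.cross_entropy(logits, targets) + F.cross_entropy(logits.t(), targets))
```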
Graph-less Collaborative Filtering
1 code implementation • 15 Mar 2023 • Lianghao Xia, Chao Huang, Jiao Shi, Yong Xu
Motivated by these limitations, we propose a simple and effective collaborative filtering model (SimRec) that marries the power of knowledge distillation and contrastive learning.
Tasks: Collaborative Filtering, Contrastive Learning, +2
★ 32 • Paper • Code
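Assuming the usual reading of "knowledge distillation" here, a graph-based teacher guiding a graph-free student recommender, a minimal score-level distillation term might look like the sketch below. The function and the temperature are illustrative; SimRec's published objectives are more involved.

```python
# Sketch of score-level knowledge distillation from a graph-based teacher into a
# graph-free student recommender (illustrative only).
import torch
import torch.nn.functional as F

def distill_scores(student_scores, teacher_scores, tau=2.0):
    """Both tensors: (batch, num_candidate_items) predicted preference scores."""
    t = F.softmax(teacher_scores / tau, dim=1)          # soft targets from the teacher
    log_s = F.log_softmax(student_scores / tau, dim=1)
    return F.kl_div(log_s, t, reduction="batchmean") * tau * tau

# Example: a frozen teacher's scores guide a lightweight student.
teacher = torch.randn(64, 100)
student = torch.randn(64, 100, requires_grad=True)
loss = distill_scores(student, teacher)
loss.backward()
```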
Disentangled Graph Social Recommendation
1 code implementation • 14 Mar 2023 • Lianghao Xia, Yizhen Shao, Chao Huang, Yong Xu, Huance Xu, Jian Pei
In this work, we design a Disentangled Graph Neural Network (DGNN) with integrated latent memory units, which allow DGNN to maintain factorized representations for heterogeneous types of user and item connections.
Tasks: Graph Neural Network
★ 19 • Paper • Code

Incomplete Multi-View Multi-Label Learning via Label-Guided Masked View- and Category-Aware Transformers
1 code implementation • 13 Mar 2023 • Chengliang Liu, Jie Wen, Xiaoling Luo, Yong Xu
The former aggregates information from different views while extracting view-specific features, and the latter learns subcategory embeddings to improve classification performance.
Tasks: Multi-Label Classification, +2
★ 9 • Paper • Code

Heterogeneous Graph Contrastive Learning for Recommendation
1 code implementation • 2 Mar 2023 • Mengru Chen, Chao Huang, Lianghao Xia, Wei Wei, Yong Xu, Ronghua Luo
In light of this, we propose Heterogeneous Graph Contrastive Learning (HGCL), which incorporates heterogeneous relational semantics into user-item interaction modeling with contrastive-learning-enhanced knowledge transfer across different views.
Tasks: Contrastive Learning, Recommendation Systems, +3
★ 106 • Paper • Code
Multi-Behavior Graph Neural Networks for Recommender System
no code implementations • 17 Feb 2023 • Lianghao Xia, Chao Huang, Yong Xu, Peng Dai, Liefeng Bo
Recent years have witnessed the emerging success of many deep learning-based recommendation models that augment collaborative filtering with various neural network architectures, such as the multi-layer perceptron and the autoencoder.
Tasks: Collaborative Filtering, Graph Neural Network, +1
Paper • Add Code

Coherent Event Guided Low-Light Video Enhancement
1 code implementation • ICCV 2023 • Jinxiu Liang, Yixin Yang, Boyu Li, Peiqi Duan, Yong Xu, Boxin Shi
With frame-based cameras, capturing fast-moving scenes without suffering from blur often comes at the cost of low SNR and low contrast.
Tasks: Video Enhancement
★ 26 • Paper • Code

CIGAR: Cross-Modality Graph Reasoning for Domain Adaptive Object Detection
no code implementations • CVPR 2023 • Yabo Liu, Jinghua Wang, Chao Huang, YaoWei Wang, Yong Xu
To overcome these problems, we propose a cross-modality graph reasoning adaptation (CIGAR) method that takes advantage of both visual and linguistic knowledge.
Tasks: Graph Matching, Object Detection, +1
Paper • Add Code
Highly Confident Local Structure Based Consensus Graph Learning for Incomplete Multi-View Clustering
1 code implementation • CVPR 2023 • Jie Wen, Chengliang Liu, Gehui Xu, Zhihao Wu, Chao Huang, Lunke Fei, Yong Xu
Graph-based multi-view clustering has attracted extensive attention because of its powerful clustering-structure representation ability and noise robustness.
Tasks: Clustering, Graph Learning, +1
★ 4 • Paper • Code

Universal Object Detection with Large Vision Model
1 code implementation • 19 Dec 2022 • Feng Lin, Wenze Hu, YaoWei Wang, Yonghong Tian, Guangming Lu, Fanglin Chen, Yong Xu, Xiaoyu Wang
In this study, our focus is on a specific challenge: the large-scale, multi-domain universal object detection problem, which contributes to the broader goal of achieving a universal vision system.
Tasks: Model, Object, +2
★ 25 • Paper • Code
href="/paper/leveraging-single-view-images-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2212.00564.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/leveraging-single-view-images-for">Leveraging Single-View Images for Unsupervised 3D Point Cloud Completion</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/leveraging-single-view-images-for#code">1 code implementation</a> • <span class="author-name-text item-date-pub">1 Dec 2022</span> • <span class="author-span "> <a href="/author/lintai-wu">Lintai Wu</a></span>, <span class="author-span "> <a href="/author/qijian-zhang">Qijian Zhang</a></span>, <span class="author-span "> <a href="/author/junhui-hou">Junhui Hou</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">The experimental results of our method are superior to those of the state-of-the-art unsupervised methods by a large margin.</p> <div class="sota"> </div> <p> <a href="/task/point-cloud-completion"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Point Cloud Completion</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 21</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/leveraging-single-view-images-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/leveraging-single-view-images-for#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div 
class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/deep-neural-mel-subband-beamformer-for-in-car"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2211.12590.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/deep-neural-mel-subband-beamformer-for-in-car">Deep Neural Mel-Subband Beamformer for In-car Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/deep-neural-mel-subband-beamformer-for-in-car#code">no code implementations</a> • <span class="author-name-text item-date-pub">22 Nov 2022</span> • <span class="author-span "> <a href="/author/vinay-kothapally">Vinay Kothapally</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">While current deep learning (DL)-based beamforming techniques have been proved effective in speech separation, they are often designed to process narrow-band (NB) frequencies independently which results in higher computational costs and inference times, making them unsuitable for real-world use.</p> <div class="sota"> </div> <p> <a href="/task/speech-separation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000246-5755ee34.jpg"> <span>Speech Separation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deep-neural-mel-subband-beamformer-for-in-car" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deep-neural-mel-subband-beamformer-for-in-car#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2054594 --> <div class="col-lg-3 item-image-col"> <a href="/paper/temporal-modeling-matters-a-novel-temporal"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/d46df515-da1f-48a0-9694-f07001db804e.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/temporal-modeling-matters-a-novel-temporal">Temporal Modeling Matters: A Novel Temporal Emotional Modeling Approach for Speech Emotion 
Temporal Modeling Matters: A Novel Temporal Emotional Modeling Approach for Speech Emotion Recognition
1 code implementation • 14 Nov 2022 • Jiaxin Ye, Xin-Cheng Wen, Yujie Wei, Yong Xu, KunHong Liu, Hongming Shan
Specifically, TIM-Net first employs temporal-aware blocks to learn temporal affective representations, then integrates complementary information from the past and the future to enrich contextual representations, and finally fuses features at multiple time scales to better adapt to emotional variation.
Tasks: Speech Emotion Recognition
Stars: 167
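As a rough illustration of the ingredients named in that sentence (temporal-aware blocks, past/future integration, multi-scale fusion), here is a minimal PyTorch sketch. The block structure, channel count, and the 7-class output are assumptions made for the example, not the published TIM-Net architecture.

```python
import torch
import torch.nn as nn

class TemporalBlock(nn.Module):
    """Dilated 1D conv block over (batch, channels, time) with a residual path."""
    def __init__(self, ch, dilation):
        super().__init__()
        self.conv = nn.Conv1d(ch, ch, kernel_size=3, padding=dilation, dilation=dilation)
        self.act = nn.ReLU()
    def forward(self, x):
        return self.act(self.conv(x)) + x  # residual keeps earlier scales around

class BiDirMultiScale(nn.Module):
    """Run stacked temporal blocks on the sequence and its time-reversal,
    then fuse the per-scale summaries: a stand-in for the described
    past/future integration and multi-scale fusion."""
    def __init__(self, ch=64, n_scales=4, n_classes=7):
        super().__init__()
        self.blocks = nn.ModuleList([TemporalBlock(ch, 2 ** i) for i in range(n_scales)])
        self.head = nn.Linear(2 * n_scales * ch, n_classes)
    def forward(self, x):                        # x: (B, ch, T)
        feats = []
        for direction in (x, torch.flip(x, dims=[2])):
            h = direction
            for blk in self.blocks:
                h = blk(h)
                feats.append(h.mean(dim=-1))     # temporal average per scale
        return self.head(torch.cat(feats, dim=-1))

logits = BiDirMultiScale()(torch.randn(2, 64, 100))
print(logits.shape)  # torch.Size([2, 7])
```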
GM-TCNet: Gated Multi-scale Temporal Convolutional Network using Emotion Causality for Speech Emotion Recognition
1 code implementation • 28 Oct 2022 • Jia-Xin Ye, Xin-Cheng Wen, Xuan-Ze Wang, Yong Xu, Yan Luo, Chang-Li Wu, Li-Yan Chen, Kun-Hong Liu
In this paper, we propose a Gated Multi-scale Temporal Convolutional Network (GM-TCNet) to construct a novel emotional causality representation learning component with a multi-scale receptive field.
Tasks: Representation Learning • Speech Emotion Recognition
Stars: 167
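To make "gated multi-scale temporal convolution" concrete, the sketch below combines a WaveNet-style gated branch with several dilations. The 39-dimensional input (e.g. MFCC frames) and the dilation set are illustrative assumptions, not GM-TCNet's exact configuration.

```python
import torch
import torch.nn as nn

class GatedDilatedConv(nn.Module):
    """Gated temporal convolution: a tanh 'filter' branch modulated by a
    sigmoid 'gate' branch, as in gated TCN/WaveNet-style blocks."""
    def __init__(self, ch, dilation):
        super().__init__()
        self.filt = nn.Conv1d(ch, ch, 3, padding=dilation, dilation=dilation)
        self.gate = nn.Conv1d(ch, ch, 3, padding=dilation, dilation=dilation)
    def forward(self, x):
        return torch.tanh(self.filt(x)) * torch.sigmoid(self.gate(x))

class MultiScaleGatedTCN(nn.Module):
    """Concatenate gated branches with different dilations so each time step
    sees several receptive-field sizes at once."""
    def __init__(self, ch=39, dilations=(1, 2, 4, 8)):
        super().__init__()
        self.branches = nn.ModuleList([GatedDilatedConv(ch, d) for d in dilations])
        self.mix = nn.Conv1d(ch * len(dilations), ch, kernel_size=1)
    def forward(self, x):                          # x: (B, ch, T), e.g. MFCC frames
        return self.mix(torch.cat([b(x) for b in self.branches], dim=1))

out = MultiScaleGatedTCN()(torch.randn(2, 39, 120))
print(out.shape)  # torch.Size([2, 39, 120])
```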
Prompt-driven efficient Open-set Semi-supervised Learning
no code implementations • 28 Sep 2022 • Haoran Li, Chun-Mei Feng, Tao Zhou, Yong Xu, Xiaojun Chang
In this paper, we propose a prompt-driven efficient OSSL framework, called OpenPrompt, which can propagate class information from labeled to unlabeled data with only a small number of trainable parameters.
Tasks: Computational Efficiency • Outlier Detection
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2009764 --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-survey-on-incomplete-multi-view-clustering"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2208.08040.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-survey-on-incomplete-multi-view-clustering">A Survey on Incomplete Multi-view Clustering</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-survey-on-incomplete-multi-view-clustering#code">1 code implementation</a> • <span class="author-name-text item-date-pub">17 Aug 2022</span> • <span class="author-span "> <a href="/author/jie-wen">Jie Wen</a></span>, <span class="author-span "> <a href="/author/zheng-zhang">Zheng Zhang</a></span>, <span class="author-span "> <a href="/author/lunke-fei">Lunke Fei</a></span>, <span class="author-span "> <a href="/author/bob-zhang">Bob Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/zhao-zhang">Zhao Zhang</a></span>, <span class="author-span "> <a href="/author/jinxing-li">Jinxing Li</a></span> </p> <p class="item-strip-abstract">However, in practical applications, such as disease diagnosis, multimedia analysis, and recommendation system, it is common to observe that not all views of samples are available in many cases, which leads to the failure of the conventional multi-view clustering methods.</p> <div class="sota"> </div> <p> <a href="/task/clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001594-3ce5d6d8.jpg"> <span>Clustering</span> </span> </a> <a href="/task/incomplete-multi-view-clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Incomplete multi-view clustering</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/a-survey-on-incomplete-multi-view-clustering#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 44</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-survey-on-incomplete-multi-view-clustering" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 
Localized Sparse Incomplete Multi-view Clustering
1 code implementation • 5 Aug 2022 • Chengliang Liu, Zhihao Wu, Jie Wen, Chao Huang, Yong Xu
Moreover, a novel local graph embedding term is introduced to learn the structured consensus representation.
Tasks: Clustering • Graph Embedding (+2 more)
Stars: 21
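A local graph embedding term of the kind mentioned just above is typically a Laplacian penalty tr(FᵀLF) computed over the samples actually observed in a given view. The NumPy snippet below is a generic illustration of that term under assumed toy data, not the paper's full objective.

```python
import numpy as np

def local_graph_penalty(F, W, present):
    """tr(Fv^T L Fv) restricted to the samples observed in one view.
    F: (n, k) consensus representation, W: (m, m) similarity over the m
    observed samples, present: boolean mask of length n with m True entries."""
    Fv = F[present]                       # rows of F for observed samples
    L = np.diag(W.sum(axis=1)) - W        # unnormalised graph Laplacian
    return np.trace(Fv.T @ L @ Fv)        # small when linked samples stay close

F = np.random.randn(6, 2)
present = np.array([True, True, False, True, True, False])
W = np.ones((4, 4)) - np.eye(4)           # toy fully connected similarity graph
print(local_graph_penalty(F, W, present))
```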
CTL-MTNet: A Novel CapsNet and Transfer Learning-Based Mixed Task Net for the Single-Corpus and Cross-Corpus Speech Emotion Recognition
no code implementations • 18 Jul 2022 • Xin-Cheng Wen, Jia-Xin Ye, Yan Luo, Yong Xu, Xuan-Ze Wang, Chang-Li Wu, Kun-Hong Liu
For the single-corpus task, a combined Convolution-Pooling and Attention CapsNet module (CPAC) is designed by embedding the self-attention mechanism into the CapsNet, guiding the module to focus on the important features that can be fed into different capsules.
src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Cross-corpus</span> </span> </a> <a href="/task/speech-emotion-recognition"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Speech Emotion Recognition</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/ctl-mtnet-a-novel-capsnet-and-transfer#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/ctl-mtnet-a-novel-capsnet-and-transfer" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/ctl-mtnet-a-novel-capsnet-and-transfer#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1983349 --> <div class="col-lg-3 item-image-col"> <a href="/paper/self-supervised-low-light-image-enhancement-1"> <div class="item-image" style="background-image: 
Self-Supervised Low-Light Image Enhancement Using Discrepant Untrained Network Priors
1 code implementation • IEEE Transactions on Circuits and Systems for Video Technology 2022 • Jinxiu Liang, Yong Xu, Yuhui Quan, Boxin Shi, Hui Ji
The enhancement is done by jointly optimizing the Retinex decomposition and the illumination adjustment.
Tasks: Denoising • Low-Light Image Enhancement
Stars: 18
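The Retinex decomposition mentioned there factors an image as reflectance times illumination. Purely as a toy, the sketch below fits such a decomposition by direct pixel-wise optimization and then re-exposes the illumination with a gamma curve; the sigmoid parameterization, TV-style smoothness weight, and gamma value are illustrative assumptions and stand in for the paper's untrained network priors.

```python
import torch

def retinex_decompose(img, iters=200, lr=0.05, gamma=0.4):
    """Toy joint Retinex fit: img ~ R * L with a smoothness prior on the
    illumination L, followed by a gamma-based illumination adjustment.
    img: (3, H, W) tensor with values in [0, 1]."""
    log_R = torch.zeros_like(img, requires_grad=True)
    log_L = torch.zeros(1, *img.shape[1:], requires_grad=True)  # shared illumination
    opt = torch.optim.Adam([log_R, log_L], lr=lr)
    for _ in range(iters):
        R, L = torch.sigmoid(log_R), torch.sigmoid(log_L)
        recon = ((R * L - img) ** 2).mean()
        smooth = (L[..., :, 1:] - L[..., :, :-1]).abs().mean() \
               + (L[..., 1:, :] - L[..., :-1, :]).abs().mean()
        loss = recon + 0.1 * smooth
        opt.zero_grad(); loss.backward(); opt.step()
    R, L = torch.sigmoid(log_R), torch.sigmoid(log_L)
    # Brighten by re-exposing the estimated illumination with a gamma curve.
    return (R * L.clamp(1e-3, 1.0) ** gamma).detach().clamp(0.0, 1.0)

enhanced = retinex_decompose(torch.rand(3, 32, 32) * 0.2)
print(enhanced.shape)  # torch.Size([3, 32, 32])
```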
src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000098-4572d568_kd4fjWN.jpg"> <span>Low-Light Image Enhancement</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 18</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/self-supervised-low-light-image-enhancement-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/self-supervised-low-light-image-enhancement-1#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1977550 --> <div class="col-lg-3 item-image-col"> <a href="/paper/multi-behavior-sequential-recommendation-with"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2206.02687.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/multi-behavior-sequential-recommendation-with">Multi-Behavior Sequential Recommendation with Temporal Graph Transformer</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/multi-behavior-sequential-recommendation-with#code">1 code implementation</a> • <span class="author-name-text item-date-pub">6 Jun 2022</span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/jian-pei">Jian Pei</a></span> </p> <p class="item-strip-abstract">The new TGT method endows the sequential 
The new TGT method enables the sequential recommendation architecture to distill dedicated knowledge about type-specific behavior relational context and implicit behavior dependencies.
Tasks: Sequential Recommendation
Stars: 14

Deniable Steganography
no code implementations • 25 May 2022 • Yong Xu, Zhihua Xia, Zichi Wang, Xinpeng Zhang, Jian Weng
href="/author/xinpeng-zhang">Xinpeng Zhang</a></span>, <span class="author-span "> <a href="/author/jian-weng">Jian Weng</a></span> </p> <p class="item-strip-abstract">With a stego media discovered, the adversary could find out the sender or receiver and coerce them to disclose the secret message, which we name as coercive attack in this paper.</p> <div class="sota"> </div> <p> <a href="/task/steganalysis"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Steganalysis</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deniable-steganography" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deniable-steganography#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/neuralecho-a-self-attentive-recurrent-neural"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2205.10401.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/neuralecho-a-self-attentive-recurrent-neural">NeuralEcho: A Self-Attentive Recurrent Neural Network For Unified Acoustic Echo Suppression And Speech Enhancement</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/neuralecho-a-self-attentive-recurrent-neural#code">no code implementations</a> • <span class="author-name-text item-date-pub">20 May 2022</span> • <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/chunlei-zhang">Chunlei Zhang</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Acoustic echo cancellation (AEC) plays an important role in the full-duplex speech communication as well as the front-end speech enhancement for recognition in the conditions when the loudspeaker plays back.</p> <div class="sota"> </div> <p> <a href="/task/acoustic-echo-cancellation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Acoustic echo cancellation</span> </span> </a> <a href="/task/speech-enhancement"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/bf2407da-ded8-4af2-9537-59a43b7d3ca3.jpg"> <span>Speech Enhancement</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/neuralecho-a-self-attentive-recurrent-neural#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/neuralecho-a-self-attentive-recurrent-neural" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/neuralecho-a-self-attentive-recurrent-neural#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1958086 --> <div class="col-lg-3 item-image-col"> <a href="/paper/hypergraph-contrastive-collaborative"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2204.12200.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/hypergraph-contrastive-collaborative">Hypergraph Contrastive Collaborative Filtering</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/hypergraph-contrastive-collaborative#code">1 code implementation</a> • <span class="author-name-text item-date-pub">26 Apr 2022</span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/jiashu-zhao">Jiashu Zhao</a></span>, <span class="author-span "> <a href="/author/dawei-yin">Dawei Yin</a></span>, <span class="author-span "> <a href="/author/jimmy-xiangji-huang">Jimmy Xiangji Huang</a></span> </p> <p class="item-strip-abstract">Additionally, our HCCF model effectively integrates the hypergraph structure encoding with self-supervised learning to reinforce the representation quality of recommender systems, based on the hypergraph-enhanced self-discrimination.</p> <div class="sota"> </div> <p> <a href="/task/collaborative-filtering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000595-96a2d3eb.jpg"> <span>Collaborative Filtering</span> </span> </a> <a href="/task/contrastive-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/a2dca842-c6b6-4209-b2a8-dffeac2ef283.jpg"> <span>Contrastive Learning</span> </span> </a> <a style="position: relative; top: -2px;" 
href="/paper/hypergraph-contrastive-collaborative#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 96</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/hypergraph-contrastive-collaborative" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/hypergraph-contrastive-collaborative#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1953565 --> <div class="col-lg-3 item-image-col"> <a href="/paper/spatial-temporal-hypergraph-self-supervised"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2204.08587.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/spatial-temporal-hypergraph-self-supervised">Spatial-Temporal Hypergraph Self-Supervised Learning for Crime Prediction</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/spatial-temporal-hypergraph-self-supervised#code">1 code implementation</a> • <span class="author-name-text item-date-pub">18 Apr 2022</span> • <span class="author-span "> <a href="/author/zhonghang-li">Zhonghang Li</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/jian-pei">Jian Pei</a></span> </p> <p class="item-strip-abstract">Crime has become a 
Crime has become a major concern in many cities, creating a rising demand for timely prediction of citywide crime occurrence.
Tasks: Crime Prediction • Decision Making (+1 more)
Stars: 34

Global-Supervised Contrastive Loss and View-Aware-Based Post-Processing for Vehicle Re-Identification
href="/paper/global-supervised-contrastive-loss-and-view#code">no code implementations</a> • <span class="author-name-text item-date-pub">17 Apr 2022</span> • <span class="author-span "> <a href="/author/zhijun-hu">Zhijun Hu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/jie-wen">Jie Wen</a></span>, <span class="author-span "> <a href="/author/xianjing-cheng">Xianjing Cheng</a></span>, <span class="author-span "> <a href="/author/zaijun-zhang">Zaijun Zhang</a></span>, <span class="author-span "> <a href="/author/lilei-sun">Lilei Sun</a></span>, <span class="author-span "> <a href="/author/yaowei-wang">YaoWei Wang</a></span> </p> <p class="item-strip-abstract">The proposed VABPP method is the first time that the view-aware-based method is used as a post-processing method in the field of vehicle re-identification.</p> <div class="sota"> </div> <p> <a href="/task/attribute"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Attribute</span> </span> </a> <a href="/task/vehicle-re-identification"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000129-5408a716_UuHzCSz.jpg"> <span>Vehicle Re-Identification</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/global-supervised-contrastive-loss-and-view" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/global-supervised-contrastive-loss-and-view#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1944455 --> <div class="col-lg-3 item-image-col"> <a href="/paper/eend-ss-joint-end-to-end-neural-speaker"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2203.17068.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/eend-ss-joint-end-to-end-neural-speaker">EEND-SS: Joint End-to-End Neural Speaker Diarization and Speech Separation for Flexible Number of Speakers</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/eend-ss-joint-end-to-end-neural-speaker#code">1 code implementation</a> • <span class="author-name-text item-date-pub">31 Mar 2022</span> • <span class="author-span "> <a href="/author/soumi-maiti">Soumi Maiti</a></span>, <span class="author-span "> <a href="/author/yushi-ueda">Yushi Ueda</a></span>, <span class="author-span "> <a 
href="/author/shinji-watanabe">Shinji Watanabe</a></span>, <span class="author-span "> <a href="/author/chunlei-zhang">Chunlei Zhang</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">In this paper, we present a novel framework that jointly performs three tasks: speaker diarization, speech separation, and speaker counting.</p> <div class="sota"> </div> <p> <a href="/task/decoder"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Decoder</span> </span> </a> <a href="/task/speaker-diarization-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>speaker-diarization</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/eend-ss-joint-end-to-end-neural-speaker#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 8,784</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/eend-ss-joint-end-to-end-neural-speaker" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/eend-ss-joint-end-to-end-neural-speaker#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1943553 --> <div class="col-lg-3 item-image-col"> <a href="/paper/fine-grained-object-classification-via-self"> <div class="item-image" 
style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/b9ab20d3-b5f4-4b5b-92d3-c9521dde0c4b.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/fine-grained-object-classification-via-self">Fine-Grained Object Classification via Self-Supervised Pose Alignment</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/fine-grained-object-classification-via-self#code">2 code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2022-1"> CVPR 2022 </a> </span> • <span class="author-span "> <a href="/author/xuhui-yang">Xuhui Yang</a></span>, <span class="author-span "> <a href="/author/yaowei-wang">YaoWei Wang</a></span>, <span class="author-span "> <a href="/author/ke-chen">Ke Chen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/yonghong-tian">Yonghong Tian</a></span> </p> <p class="item-strip-abstract">Semantic patterns of fine-grained objects are determined by subtle appearance difference of local parts, which thus inspires a number of part-based methods.</p> <div class="sota"> </div> <p> <a href="/task/classification-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/d0eafcb3-1a12-430b-8bb5-6f6bbff1a4b3.jpg"> <span>Classification</span> </span> </a> <a href="/task/object"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Object</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/fine-grained-object-classification-via-self#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 23</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/fine-grained-object-classification-via-self" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/fine-grained-object-classification-via-self#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 
14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1915102 --> <div class="col-lg-3 item-image-col"> <a href="/paper/contrastive-meta-learning-with-behavior"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2202.08523.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/contrastive-meta-learning-with-behavior">Contrastive Meta Learning with Behavior Multiplicity for Recommendation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/contrastive-meta-learning-with-behavior#code">1 code implementation</a> • <span class="author-name-text item-date-pub">17 Feb 2022</span> • <span class="author-span "> <a href="/author/wei-wei">Wei Wei</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/jiashu-zhao">Jiashu Zhao</a></span>, <span class="author-span "> <a href="/author/dawei-yin">Dawei Yin</a></span> </p> <p class="item-strip-abstract">In addition, to capture the diverse multi-behavior patterns, we design a contrastive meta network to encode the customized behavior heterogeneity for different users.</p> <div class="sota"> </div> <p> <a href="/task/contrastive-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/a2dca842-c6b6-4209-b2a8-dffeac2ef283.jpg"> <span>Contrastive Learning</span> </span> </a> <a href="/task/meta-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001088-6b0b3a7f_0bh9941.jpg"> <span>Meta-Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 47</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/contrastive-meta-learning-with-behavior" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a 
href="/paper/contrastive-meta-learning-with-behavior#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1899065 --> <div class="col-lg-3 item-image-col"> <a href="/paper/collaborative-reflection-augmented"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2201.03158.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/collaborative-reflection-augmented">Collaborative Reflection-Augmented Autoencoder Network for Recommender Systems</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/collaborative-reflection-augmented#code">1 code implementation</a> • <span class="author-name-text item-date-pub">10 Jan 2022</span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/huance-xu">Huance Xu</a></span>, <span class="author-span "> <a href="/author/xiang-li">Xiang Li</a></span>, <span class="author-span "> <a href="/author/weiguo-zhang">WeiGuo Zhang</a></span> </p> <p class="item-strip-abstract">As the deep learning techniques have expanded to real-world recommendation tasks, many deep neural network based Collaborative Filtering (CF) models have been developed to project user-item interactions into latent feature space, based on various neural architectures, such as multi-layer perceptron, auto-encoder and graph neural networks.</p> <div class="sota"> </div> <p> <a href="/task/collaborative-filtering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000595-96a2d3eb.jpg"> <span>Collaborative Filtering</span> </span> </a> <a href="/task/recommendation-systems"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000592-2508deea.jpg"> <span>Recommendation Systems</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 
310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 2</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/collaborative-reflection-augmented" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/collaborative-reflection-augmented#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1898719 --> <div class="col-lg-3 item-image-col"> <a href="/paper/spatial-temporal-sequential-hypergraph-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2201.02435.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/spatial-temporal-sequential-hypergraph-1">Spatial-Temporal Sequential Hypergraph Network for Crime Prediction with Dynamic Multiplex Relation Learning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/spatial-temporal-sequential-hypergraph-1#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/ijcai-2021-8"> IJCAI 2021 </a> </span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/peng-dai">Peng Dai</a></span>, <span class="author-span "> <a href="/author/liefeng-bo">Liefeng Bo</a></span>, <span class="author-span "> <a href="/author/xiyue-zhang">Xiyue Zhang</a></span>, <span class="author-span "> <a href="/author/tianyi-chen">Tianyi Chen</a></span> </p> <p class="item-strip-abstract">Crime prediction is crucial for public safety and resource optimization, yet is very challenging due to two aspects: i) the dynamics of criminal patterns across time and space, crime events are distributed unevenly on both spatial and temporal domains; ii) time-evolving 
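For readers unfamiliar with the CF background this abstract refers to, the sketch below shows, in minimal form, how an auto-encoder-style model projects a user's interaction vector into a latent feature space and reconstructs preference scores. It is a generic illustration under assumed names and sizes, not the paper's reflection-augmented architecture.

```python
# Minimal sketch (NOT the paper's model): an auto-encoder-style collaborative
# filtering baseline that maps a user's interaction vector to a latent space
# and reconstructs preference scores. Names and sizes are illustrative.
import torch
import torch.nn as nn
import torch.nn.functional as F

class InteractionAutoEncoder(nn.Module):
    def __init__(self, num_items: int, latent_dim: int = 64):
        super().__init__()
        self.encoder = nn.Sequential(nn.Linear(num_items, latent_dim), nn.ReLU())
        self.decoder = nn.Linear(latent_dim, num_items)

    def forward(self, interactions: torch.Tensor) -> torch.Tensor:
        # interactions: (batch, num_items) implicit-feedback vectors
        latent = self.encoder(interactions)   # latent user representation
        return self.decoder(latent)           # reconstructed preference scores

# Toy usage: reconstruct observed interactions; unseen items get scores too.
model = InteractionAutoEncoder(num_items=1000)
batch = torch.randint(0, 2, (8, 1000)).float()   # 8 users, binary feedback
scores = model(batch)
loss = F.binary_cross_entropy_with_logits(scores, batch)
```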
Spatial-Temporal Sequential Hypergraph Network for Crime Prediction with Dynamic Multiplex Relation Learning
1 code implementation • IJCAI 2021 • Lianghao Xia, Chao Huang, Yong Xu, Peng Dai, Liefeng Bo, Xiyue Zhang, Tianyi Chen
Crime prediction is crucial for public safety and resource optimization, yet it is very challenging due to two aspects: i) the dynamics of criminal patterns across time and space, as crime events are distributed unevenly over both the spatial and temporal domains; ii) the time-evolving dependencies between different types of crimes (e.g., Theft, Robbery, Assault, Damage), which reveal fine-grained semantics of crimes.
Tasks: Crime Prediction, Relation • ★ 18

Multi-Behavior Enhanced Recommendation with Cross-Interaction Collaborative Relation Modeling
1 code implementation • 7 Jan 2022 • Lianghao Xia, Chao Huang, Yong Xu, Peng Dai, Mengyin Lu, Liefeng Bo
Because they overlook users' multi-behavioral patterns over different items, existing recommendation methods are insufficient to capture heterogeneous collaborative signals from user multi-behavior data.
Tasks: Collaborative Filtering, Recommendation Systems (+1) • ★ 14
SphericGAN: Semi-Supervised Hyper-Spherical Generative Adversarial Networks for Fine-Grained Image Synthesis
no code implementations • CVPR 2022 • Tianyi Chen, Yunfei Zhang, Xiaoyang Huo, Si Wu, Yong Xu, Hau San Wong
To reduce the dependence of generative models on labeled data, we propose a semi-supervised hyper-spherical GAN for class-conditional fine-grained image generation, and our model is referred to as SphericGAN.
Tasks: Generative Adversarial Network, Image Generation

Specificity-Preserving Federated Learning for MR Image Reconstruction
1 code implementation • 9 Dec 2021 • Chun-Mei Feng, Yunlu Yan, Shanshan Wang, Yong Xu, Ling Shao, Huazhu Fu
The core idea is to divide the MR reconstruction model into two parts: a globally shared encoder to obtain a generalized representation at the global level, and a client-specific decoder to preserve the domain-specific properties of each client, which is important for collaborative reconstruction when the clients have unique distributions.
Tasks: Federated Learning, Image Reconstruction (+1) • ★ 45
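The split described in this abstract can be pictured with a short sketch: only the encoder weights are averaged across clients, while each client keeps its own decoder. This is a hedged illustration of the shared-encoder/client-specific-decoder idea, assuming a generic convolutional model; it is not the authors' released implementation, and every name below is made up for the example.

```python
# Sketch under assumptions: federated averaging restricted to the "globally
# shared" encoder, while each client's decoder stays local (client-specific).
import torch
import torch.nn as nn

def make_model() -> nn.ModuleDict:
    # Toy stand-in for an MR reconstruction network split into encoder/decoder.
    return nn.ModuleDict({
        "encoder": nn.Sequential(nn.Conv2d(1, 16, 3, padding=1), nn.ReLU()),
        "decoder": nn.Conv2d(16, 1, 3, padding=1),
    })

def average_encoders(client_models) -> None:
    """Average only the encoder parameters across clients and broadcast them back."""
    encoder_states = [m["encoder"].state_dict() for m in client_models]
    avg = {k: torch.stack([s[k] for s in encoder_states]).mean(dim=0)
           for k in encoder_states[0]}
    for m in client_models:
        m["encoder"].load_state_dict(avg)  # shared, generalized representation
        # m["decoder"] is deliberately untouched: it preserves client specificity

clients = [make_model() for _ in range(3)]
# ... each client would train locally on its own MR data here ...
average_encoders(clients)
```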
SwinTrack: A Simple and Strong Baseline for Transformer Tracking
1 code implementation • 2 Dec 2021 • Liting Lin, Heng Fan, Zhipeng Zhang, Yong Xu, Haibin Ling
The potential of Transformer in representation learning remains under-explored.
Ranked #15 on Visual Object Tracking on TrackingNet
Tasks: Representation Learning, Visual Object Tracking (+1) • ★ 242

Encoding Spatial Distribution of Convolutional Features for Texture Representation
1 code implementation • NeurIPS 2021 • Yong Xu, Feng Li, Zhile Chen, Jinxiu Liang, Yuhui Quan
Existing convolutional neural networks (CNNs) often use global average pooling (GAP) to aggregate feature maps into a single representation.
Tasks: Material Recognition, Retrieval (+1) • ★ 17
Joint Neural AEC and Beamforming with Double-Talk Detection
no code implementations • 9 Nov 2021 • Vinay Kothapally, Yong Xu, Meng Yu, Shi-Xiong Zhang, Dong Yu
We train the proposed model in an end-to-end fashion to eliminate background noise and echoes from far-end audio devices, including nonlinear distortions.
Tasks: Acoustic echo cancellation, Deep Learning (+3)

Deep multi-modal aggregation network for MR image reconstruction with auxiliary modality
2 code implementations • 15 Oct 2021 • Chun-Mei Feng, Huazhu Fu, Tianfei Zhou, Yong Xu, Ling Shao, David Zhang
Magnetic resonance (MR) imaging produces detailed images of organs and tissues with better contrast, but it suffers from a long acquisition time, which makes the image quality vulnerable to, for example, motion artifacts.
Tasks: Image Reconstruction • ★ 10
Traffic Flow Forecasting with Spatial-Temporal Graph Diffusion Network
1 code implementation • 8 Oct 2021 • Xiyue Zhang, Chao Huang, Yong Xu, Lianghao Xia, Peng Dai, Liefeng Bo, Junbo Zhang, Yu Zheng
Accurate forecasting of citywide traffic flow has been playing a critical role in a variety of spatial-temporal mining applications, such as intelligent traffic control and public risk assessment.
Tasks: Traffic Prediction • ★ 40

Global Context Enhanced Social Recommendation with Hierarchical Graph Neural Networks
1 code implementation • 8 Oct 2021 • Huance Xu, Chao Huang, Yong Xu, Lianghao Xia, Hao Xing, Dawei Yin
Social recommendation aims to leverage social connections among users to enhance recommendation performance.
Tasks: Graph Neural Network • ★ 14

Social Recommendation with Self-Supervised Metagraph Informax Network
1 code implementation • 8 Oct 2021 • Xiaoling Long, Chao Huang, Yong Xu, Huance Xu, Peng Dai, Lianghao Xia, Liefeng Bo
To model relation heterogeneity, we design a metapath-guided heterogeneous graph neural network to aggregate feature embeddings from different types of meta-relations across users and items, empowering SMIN to maintain dedicated representations for multi-faceted user- and item-wise dependencies.
Tasks: Collaborative Filtering, Graph Neural Network • ★ 13
0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1842119 --> <div class="col-lg-3 item-image-col"> <a href="/paper/graph-meta-network-for-multi-behavior"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2110.03969.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/graph-meta-network-for-multi-behavior">Graph Meta Network for Multi-Behavior Recommendation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/graph-meta-network-for-multi-behavior#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Oct 2021</span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span "> <a href="/author/peng-dai">Peng Dai</a></span>, <span class="author-span "> <a href="/author/liefeng-bo">Liefeng Bo</a></span> </p> <p class="item-strip-abstract">Modern recommender systems often embed users and items into low-dimensional latent representations, based on their observed interactions.</p> <div class="sota"> </div> <p> <a href="/task/diversity"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Diversity</span> </span> </a> <a href="/task/meta-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001088-6b0b3a7f_0bh9941.jpg"> <span>Meta-Learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/graph-meta-network-for-multi-behavior#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 42</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/graph-meta-network-for-multi-behavior" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/graph-meta-network-for-multi-behavior#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" 
height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1842118 --> <div class="col-lg-3 item-image-col"> <a href="/paper/knowledge-aware-coupled-graph-neural-network"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2110.03987.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/knowledge-aware-coupled-graph-neural-network">Knowledge-aware Coupled Graph Neural Network for Social Recommendation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/knowledge-aware-coupled-graph-neural-network#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Oct 2021</span> • <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span "> <a href="/author/huance-xu">Huance Xu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/peng-dai">Peng Dai</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/mengyin-lu">Mengyin Lu</a></span>, <span class="author-span "> <a href="/author/liefeng-bo">Liefeng Bo</a></span>, <span class="author-span "> <a href="/author/hao-xing">Hao Xing</a></span>, <span class="author-span "> <a href="/author/xiaoping-lai">Xiaoping Lai</a></span>, <span class="author-span "> <a href="/author/yanfang-ye">Yanfang Ye</a></span> </p> <p class="item-strip-abstract">While many recent efforts show the effectiveness of neural network-based social recommender systems, several important challenges have not been well addressed yet: (i) The majority of models only consider users' social connections, while ignoring the inter-dependent knowledge across items; (ii) Most of existing solutions are designed for singular type of user-item interactions, making them infeasible to capture the interaction heterogeneity; (iii) The dynamic nature of user-item interactions has been less explored in many social-aware recommendation techniques.</p> <div class="sota"> </div> <p> <a href="/task/collaborative-filtering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000595-96a2d3eb.jpg"> <span>Collaborative Filtering</span> </span> </a> <a href="/task/graph-neural-network"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Graph Neural Network</span> </span> </a> </p> </div> <div 
class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 33</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/knowledge-aware-coupled-graph-neural-network" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/knowledge-aware-coupled-graph-neural-network#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1842117 --> <div class="col-lg-3 item-image-col"> <a href="/paper/graph-enhanced-multi-task-learning-of-multi"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2110.03996.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/graph-enhanced-multi-task-learning-of-multi">Graph-Enhanced Multi-Task Learning of Multi-Level Transition Dynamics for Session-based Recommendation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/graph-enhanced-multi-task-learning-of-multi#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Oct 2021</span> • <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span "> <a href="/author/jiahui-chen">Jiahui Chen</a></span>, <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/peng-dai">Peng Dai</a></span>, <span class="author-span "> <a href="/author/yanqing-chen">Yanqing Chen</a></span>, <span class="author-span "> <a href="/author/liefeng-bo">Liefeng 
Bo</a></span>, <span class="author-span "> <a href="/author/jiashu-zhao">Jiashu Zhao</a></span>, <span class="author-span "> <a href="/author/jimmy-xiangji-huang">Jimmy Xiangji Huang</a></span> </p> <p class="item-strip-abstract">The learning process of intra- and inter-session transition dynamics are integrated, to preserve the underlying low- and high-level item relationships in a common latent space.</p> <div class="sota"> </div> <p> <a href="/task/graph-neural-network"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Graph Neural Network</span> </span> </a> <a href="/task/multi-task-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000069-a2f3e151.jpg"> <span>Multi-Task Learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/graph-enhanced-multi-task-learning-of-multi#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 17</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/graph-enhanced-multi-task-learning-of-multi" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/graph-enhanced-multi-task-learning-of-multi#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1842116 --> <div class="col-lg-3 item-image-col"> <a href="/paper/knowledge-enhanced-hierarchical-graph"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2110.04000.jpg');"> </div> </a> </div> <div 
class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/knowledge-enhanced-hierarchical-graph">Knowledge-Enhanced Hierarchical Graph Transformer Network for Multi-Behavior Recommendation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/knowledge-enhanced-hierarchical-graph#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Oct 2021</span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/peng-dai">Peng Dai</a></span>, <span class="author-span "> <a href="/author/xiyue-zhang">Xiyue Zhang</a></span>, <span class="author-span "> <a href="/author/hongsheng-yang">Hongsheng Yang</a></span>, <span class="author-span "> <a href="/author/jian-pei">Jian Pei</a></span>, <span class="author-span "> <a href="/author/liefeng-bo">Liefeng Bo</a></span> </p> <p class="item-strip-abstract">In particular: i) complex inter-dependencies across different types of user behaviors; ii) the incorporation of knowledge-aware item relations into the multi-behavior recommendation framework; iii) dynamic characteristics of multi-typed user-item interactions.</p> <div class="sota"> </div> <p> <a href="/task/graph-attention"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Graph Attention</span> </span> </a> <a href="/task/recommendation-systems"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000592-2508deea.jpg"> <span>Recommendation Systems</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 58</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/knowledge-enhanced-hierarchical-graph" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/knowledge-enhanced-hierarchical-graph#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 
14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1842115 --> <div class="col-lg-3 item-image-col"> <a href="/paper/multiplex-behavioral-relation-learning-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2110.04002.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/multiplex-behavioral-relation-learning-for">Multiplex Behavioral Relation Learning for Recommendation via Memory Augmented Transformer Network</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/multiplex-behavioral-relation-learning-for#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Oct 2021</span> • <span class="author-span "> <a href="/author/lianghao-xia">Lianghao Xia</a></span>, <span class="author-span "> <a href="/author/chao-huang">Chao Huang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/peng-dai">Peng Dai</a></span>, <span class="author-span "> <a href="/author/bo-zhang">Bo Zhang</a></span>, <span class="author-span "> <a href="/author/liefeng-bo">Liefeng Bo</a></span> </p> <p class="item-strip-abstract">The overlook of multiplex behavior relations can hardly recognize the multi-modal contextual signals across different types of interactions, which limit the feasibility of current recommendation methods.</p> <div class="sota"> </div> <p> <a href="/task/recommendation-systems"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000592-2508deea.jpg"> <span>Recommendation Systems</span> </span> </a> <a href="/task/relation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Relation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/multiplex-behavioral-relation-learning-for#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 21</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/multiplex-behavioral-relation-learning-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 
0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/multiplex-behavioral-relation-learning-for#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1825174 --> <div class="col-lg-3 item-image-col"> <a href="/paper/exploring-separable-attention-for-multi"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2109.01664.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/exploring-separable-attention-for-multi">Exploring Separable Attention for Multi-Contrast MR Image Super-Resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/exploring-separable-attention-for-multi#code">1 code implementation</a> • <span class="author-name-text item-date-pub">3 Sep 2021</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/yunlu-yan">Yunlu Yan</a></span>, <span class="author-span "> <a href="/author/kai-yu">Kai Yu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/ling-shao">Ling Shao</a></span>, <span class="author-span "> <a href="/author/huazhu-fu">Huazhu Fu</a></span> </p> <p class="item-strip-abstract">Our SANet could explore the areas of high-intensity and low-intensity regions in the "forward" and "reverse" directions with the help of the auxiliary contrast, while learning clearer anatomical structure and edge information for the SR of a target-contrast MR image.</p> <div class="sota"> </div> <p> <a href="/task/image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/5071982a-dbf1-4ad0-a76e-e4aa34c7c310.jpg"> <span>Image Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 
310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 44</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/exploring-separable-attention-for-multi" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/exploring-separable-attention-for-multi#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/heterogeneous-relational-message-passing"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2109.00711.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/heterogeneous-relational-message-passing">Heterogeneous relational message passing networks for molecular dynamics simulations</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/heterogeneous-relational-message-passing#code">no code implementations</a> • <span class="author-name-text item-date-pub">2 Sep 2021</span> • <span class="author-span "> <a href="/author/zun-wang">Zun Wang</a></span>, <span class="author-span "> <a href="/author/chong-wang">Chong Wang</a></span>, <span class="author-span "> <a href="/author/sibo-zhao">Sibo Zhao</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/shaogang-hao">Shaogang Hao</a></span>, <span class="author-span "> <a href="/author/chang-yu-hsieh-1">Chang Yu Hsieh</a></span>, <span class="author-span "> <a href="/author/bing-lin-gu">Bing-Lin Gu</a></span>, <span class="author-span "> <a href="/author/wenhui-duan">Wenhui Duan</a></span> </p> <p class="item-strip-abstract">With many frameworks based on message passing neural networks proposed to predict molecular and bulk properties, machine learning methods have tremendously shifted the paradigms of computational sciences underpinning physics, material science, chemistry, and biology.</p> <div class="sota"> </div> <p> <a href="/task/machine-learning"> <span 
class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>BIG-bench Machine Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/heterogeneous-relational-message-passing" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/heterogeneous-relational-message-passing#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/fully-non-homogeneous-atmospheric-scattering"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2108.11292.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/fully-non-homogeneous-atmospheric-scattering">Fully Non-Homogeneous Atmospheric Scattering Modeling with Convolutional Neural Networks for Single Image Dehazing</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/fully-non-homogeneous-atmospheric-scattering#code">no code implementations</a> • <span class="author-name-text item-date-pub">25 Aug 2021</span> • <span class="author-span "> <a href="/author/cong-wang">Cong Wang</a></span>, <span class="author-span "> <a href="/author/yan-huang">Yan Huang</a></span>, <span class="author-span "> <a href="/author/yuexian-zou-1">Yuexian Zou</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">However, it is noted that ASM-based SIDM degrades its performance in dehazing real world hazy images due to the limited modelling ability of ASM where the atmospheric light factor (ALF) and the angular scattering coefficient (ASC) are assumed as constants for one image.</p> <div class="sota"> </div> <p> <a href="/task/image-dehazing"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Dehazing</span> </span> </a> <a href="/task/single-image-dehazing"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Single Image Dehazing</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/fully-non-homogeneous-atmospheric-scattering" class="badge badge-light "> <span class=" 
icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/fully-non-homogeneous-atmospheric-scattering#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1795242 --> <div class="col-lg-3 item-image-col"> <a href="/paper/mtrans-multi-modal-transformer-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2106.14248.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mtrans-multi-modal-transformer-for">Multi-Modal Transformer for Accelerated MR Imaging</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mtrans-multi-modal-transformer-for#code">1 code implementation</a> • <span class="author-name-text item-date-pub">27 Jun 2021</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/yunlu-yan">Yunlu Yan</a></span>, <span class="author-span "> <a href="/author/geng-chen">Geng Chen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/ling-shao">Ling Shao</a></span>, <span class="author-span "> <a href="/author/huazhu-fu">Huazhu Fu</a></span> </p> <p class="item-strip-abstract">To this end, we propose a multi-modal transformer (MTrans), which is capable of transferring multi-scale features from the target modality to the auxiliary modality, for accelerated MR imaging.</p> <div class="sota"> </div> <p> <a href="/task/image-reconstruction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Reconstruction</span> </span> </a> <a href="/task/super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000032-0f0cf3b2.jpg"> <span>Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 73</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mtrans-multi-modal-transformer-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path 
d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mtrans-multi-modal-transformer-for#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1795266 --> <div class="col-lg-3 item-image-col"> <a href="/paper/dual-stream-reciprocal-disentanglement"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2106.13929.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/dual-stream-reciprocal-disentanglement">Dual-Stream Reciprocal Disentanglement Learning for Domain Adaptation Person Re-Identification</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/dual-stream-reciprocal-disentanglement#code">1 code implementation</a> • <span class="author-name-text item-date-pub">26 Jun 2021</span> • <span class="author-span "> <a href="/author/huafeng-li">Huafeng Li</a></span>, <span class="author-span "> <a href="/author/kaixiong-xu">Kaixiong Xu</a></span>, <span class="author-span "> <a href="/author/jinxing-li">Jinxing Li</a></span>, <span class="author-span "> <a href="/author/guangming-lu">Guangming Lu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/zhengtao-yu">Zhengtao Yu</a></span>, <span class="author-span "> <a href="/author/david-zhang">David Zhang</a></span> </p> <p class="item-strip-abstract">Since human-labeled samples are free for the target set, unsupervised person re-identification (Re-ID) has attracted much attention in recent years, by additionally exploiting the source set.</p> <div class="sota"> </div> <p> <a href="/task/disentanglement"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Disentanglement</span> </span> </a> <a href="/task/domain-adaptation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000588-823db955.jpg"> <span>Domain Adaptation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/dual-stream-reciprocal-disentanglement#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> 
</div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 5</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/dual-stream-reciprocal-disentanglement" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/dual-stream-reciprocal-disentanglement#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/deep-texture-recognition-via-exploiting-cross"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/828504.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/deep-texture-recognition-via-exploiting-cross">Deep Texture Recognition via Exploiting Cross-Layer Statistical Self-Similarity</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/deep-texture-recognition-via-exploiting-cross#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2021-1"> CVPR 2021 </a> </span> • <span class="author-span "> <a href="/author/zhile-chen">Zhile Chen</a></span>, <span class="author-span "> <a href="/author/feng-li">Feng Li</a></span>, <span class="author-span "> <a href="/author/yuhui-quan">Yuhui Quan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/hui-ji">Hui Ji</a></span> </p> <p class="item-strip-abstract">In recent years, convolutional neural networks (CNNs) have become a prominent tool for texture recognition.</p> <div 
class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deep-texture-recognition-via-exploiting-cross" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deep-texture-recognition-via-exploiting-cross#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1788379 --> <div class="col-lg-3 item-image-col"> <a href="/paper/task-transformer-network-for-joint-mri"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2106.06742.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/task-transformer-network-for-joint-mri">Task Transformer Network for Joint MRI Reconstruction and Super-Resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/task-transformer-network-for-joint-mri#code">1 code implementation</a> • <span class="author-name-text item-date-pub">12 Jun 2021</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/yunlu-yan">Yunlu Yan</a></span>, <span class="author-span "> <a href="/author/huazhu-fu">Huazhu Fu</a></span>, <span class="author-span "> <a href="/author/li-chen">Li Chen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">Then, a task transformer module is designed to embed and synthesize the relevance between the two tasks.</p> <div class="sota"> <p> <a href="/sota/image-super-resolution-on-ixi"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/image-super-resolution-on-ixi-small_cbcba20a.png"/> </a> Ranked #9 on <a href="/sota/image-super-resolution-on-ixi"> Image Super-Resolution on IXI </a> </p> </div> <p> <a href="/task/image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/5071982a-dbf1-4ad0-a76e-e4aa34c7c310.jpg"> <span>Image Super-Resolution</span> </span> </a> <a href="/task/mri-reconstruction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000002391-f435def5.jpg"> <span>MRI Reconstruction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg 
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 101</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/task-transformer-network-for-joint-mri" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/task-transformer-network-for-joint-mri#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1775852 --> <div class="col-lg-3 item-image-col"> <a href="/paper/multi-contrast-mri-super-resolution-via-a"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2105.08949.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/multi-contrast-mri-super-resolution-via-a">Multi-Contrast MRI Super-Resolution via a Multi-Stage Integration Network</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/multi-contrast-mri-super-resolution-via-a#code">1 code implementation</a> • <span class="author-name-text item-date-pub">19 May 2021</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/huazhu-fu">Huazhu Fu</a></span>, <span class="author-span "> <a href="/author/shuhao-yuan">Shuhao Yuan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">In this work, we propose a multi-stage integration network (i. 
e., MINet) for multi-contrast MRI SR, which explicitly models the dependencies between multi-contrast images at different stages to guide image SR.</p> <div class="sota"> </div> <p> <a href="/task/super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000032-0f0cf3b2.jpg"> <span>Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 46</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/multi-contrast-mri-super-resolution-via-a" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/multi-contrast-mri-super-resolution-via-a#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/donet-dual-octave-network-for-fast-mr-image"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2105.05980.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/donet-dual-octave-network-for-fast-mr-image">DONet: Dual-Octave Network for Fast MR Image Reconstruction</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/donet-dual-octave-network-for-fast-mr-image#code">no code implementations</a> • <span class="author-name-text item-date-pub">12 May 2021</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/zhanyuan-yang">Zhanyuan Yang</a></span>, <span class="author-span 
"> <a href="/author/huazhu-fu">Huazhu Fu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/jian-yang">Jian Yang</a></span>, <span class="author-span "> <a href="/author/ling-shao">Ling Shao</a></span> </p> <p class="item-strip-abstract">In this paper, we propose the Dual-Octave Network (DONet), which is capable of learning multi-scale spatial-frequency features from both the real and imaginary components of MR data, for fast parallel MR image reconstruction.</p> <div class="sota"> </div> <p> <a href="/task/image-reconstruction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Reconstruction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/donet-dual-octave-network-for-fast-mr-image" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/donet-dual-octave-network-for-fast-mr-image#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/mimo-self-attentive-rnn-beamformer-for-multi"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2104.08450.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mimo-self-attentive-rnn-beamformer-for-multi">MIMO Self-attentive RNN Beamformer for Multi-speaker Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mimo-self-attentive-rnn-beamformer-for-multi#code">no code implementations</a> • <span class="author-name-text item-date-pub">17 Apr 2021</span> • <span class="author-span "> <a href="/author/xiyun-li">Xiyun Li</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/jiaming-xu">Jiaming Xu</a></span>, <span class="author-span "> <a href="/author/bo-xu">Bo Xu</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">The spatial self-attention module is designed to attend on the cross-channel correlation in the covariance matrices.</p> <div class="sota"> </div> <p> <a href="/task/automatic-speech-recognition-2"> <span class="badge 
badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Automatic Speech Recognition</span> </span> </a> <a href="/task/automatic-speech-recognition"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Automatic Speech Recognition (ASR)</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/mimo-self-attentive-rnn-beamformer-for-multi#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mimo-self-attentive-rnn-beamformer-for-multi" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mimo-self-attentive-rnn-beamformer-for-multi#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1758777 --> <div class="col-lg-3 item-image-col"> <a href="/paper/dual-octave-convolution-for-accelerated"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2104.05345.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/dual-octave-convolution-for-accelerated">Dual-Octave Convolution for Accelerated Parallel MR Image Reconstruction</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/dual-octave-convolution-for-accelerated#code">1 code implementation</a> • <span class="author-name-text item-date-pub">12 Apr 2021</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/zhanyuan-yang">Zhanyuan Yang</a></span>, <span class="author-span "> <a href="/author/geng-chen">Geng Chen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/ling-shao">Ling Shao</a></span> </p> <p class="item-strip-abstract">We evaluate the performance of the proposed model on the acceleration of multi-coil MR image reconstruction.</p> <div class="sota"> </div> <p> <a href="/task/image-reconstruction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Reconstruction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 

MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment
no code implementations • 2 Apr 2021 • Meng Yu, Chunlei Zhang, Yong Xu, ShiXiong Zhang, Dong Yu
Objective speech quality assessment is usually conducted by comparing the received speech signal with its clean reference, while human beings are capable of evaluating speech quality without any reference, such as in mean opinion score (MOS) tests.
style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/metricnet-towards-improved-modeling-for-non" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/metricnet-towards-improved-modeling-for-non#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/tecanet-temporal-contextual-attention-network"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2103.16849.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/tecanet-temporal-contextual-attention-network">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/tecanet-temporal-contextual-attention-network#code">no code implementations</a> • <span class="author-name-text item-date-pub">31 Mar 2021</span> • <span class="author-span "> <a href="/author/helin-wang">Helin Wang</a></span>, <span class="author-span "> <a href="/author/bo-wu">Bo Wu</a></span>, <span class="author-span "> <a href="/author/lianwu-chen">LianWu Chen</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/jianwei-yu">Jianwei Yu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/chao-weng">Chao Weng</a></span>, <span class="author-span "> <a href="/author/dan-su">Dan Su</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">In this paper, we exploit the effective way to leverage contextual information to improve the speech dereverberation performance in real-world reverberant environments.</p> <div class="sota"> </div> <p> <a href="/task/room-impulse-response"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Room Impulse Response (RIR)</span> </span> </a> <a href="/task/speech-dereverberation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Speech Dereverberation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a 
href="/paper/tecanet-temporal-contextual-attention-network" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/tecanet-temporal-contextual-attention-network#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1748877 --> <div class="col-lg-3 item-image-col"> <a href="/paper/asymmetric-cnn-for-image-super-resolution"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/pgr-0001748877-09290fde.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/asymmetric-cnn-for-image-super-resolution">Asymmetric CNN for image super-resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/asymmetric-cnn-for-image-super-resolution#code">1 code implementation</a> • <span class="author-name-text item-date-pub">25 Mar 2021</span> • <span class="author-span "> <a href="/author/chunwei-tian">Chunwei Tian</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/wangmeng-zuo">WangMeng Zuo</a></span>, <span class="author-span "> <a href="/author/chia-wen-lin">Chia-Wen Lin</a></span>, <span class="author-span "> <a href="/author/david-zhang">David Zhang</a></span> </p> <p class="item-strip-abstract">In this paper, we propose an asymmetric CNN (ACNet) comprising an asymmetric block (AB), a memory enhancement block (MEB) and a high-frequency feature enhancement block (HFFEB) for image super-resolution.</p> <div class="sota"> </div> <p> <a href="/task/image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/5071982a-dbf1-4ad0-a76e-e4aa34c7c310.jpg"> <span>Image Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 79</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/asymmetric-cnn-for-image-super-resolution" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 

Distributed Newton Optimization with Maximized Convergence Rate
no code implementations • 17 Feb 2021 • Damián Marelli, Yong Xu, Minyue Fu, Zenghong Huang
As the second step towards our goal, we complement the proposed method with a fully distributed method for estimating the optimal step size that maximizes convergence speed.
Distributed Optimization • Optimization and Control

MultiFace: A Generic Training Mechanism for Boosting Face Recognition Performance
1 code implementation • 25 Jan 2021 • Jing Xu, Tszhang Guo, Yong Xu, Zenglin Xu, Kun Bai
Deep Convolutional Neural Networks (DCNNs) and their variants have been widely used in large-scale face recognition (FR) recently.
Clustering • Descriptive (+1 more)
<br/> <a href="/paper/multiface-a-generic-training-mechanism-for#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/field-free-spin-orbit-torque-induced"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2101.08518.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/field-free-spin-orbit-torque-induced">Field-free spin-orbit torque-induced switching of perpendicular magnetization in a ferrimagnetic layer with vertical composition gradient</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/field-free-spin-orbit-torque-induced#code">no code implementations</a> • <span class="author-name-text item-date-pub">21 Jan 2021</span> • <span class="author-span "> <a href="/author/zhenyi-zheng">Zhenyi Zheng</a></span>, <span class="author-span "> <a href="/author/yue-zhang">Yue Zhang</a></span>, <span class="author-span "> <a href="/author/victor-lopez-dominguez">Victor Lopez-Dominguez</a></span>, <span class="author-span "> <a href="/author/luis-sanchez-tejerina">Luis Sánchez-Tejerina</a></span>, <span class="author-span "> <a href="/author/jiacheng-shi">Jiacheng Shi</a></span>, <span class="author-span "> <a href="/author/xueqiang-feng">Xueqiang Feng</a></span>, <span class="author-span "> <a href="/author/lei-chen">Lei Chen</a></span>, <span class="author-span "> <a href="/author/zilu-wang">Zilu Wang</a></span>, <span class="author-span "> <a href="/author/zhizhong-zhang">Zhizhong Zhang</a></span>, <span class="author-span "> <a href="/author/kun-zhang">Kun Zhang</a></span>, <span class="author-span "> <a href="/author/bin-hong">Bin Hong</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/youguang-zhang">Youguang Zhang</a></span>, <span class="author-span "> <a href="/author/mario-carpentieri">Mario Carpentieri</a></span>, <span class="author-span "> <a href="/author/albert-fert">Albert Fert</a></span>, <span class="author-span "> <a href="/author/giovanni-finocchio">Giovanni Finocchio</a></span>, <span class="author-span "> <a href="/author/weisheng-zhao">Weisheng Zhao</a></span>, <span class="author-span "> <a href="/author/pedram-khalili-amiri">Pedram Khalili Amiri</a></span> </p> <p class="item-strip-abstract">Existing methods to do so 

FWB-Net: Front White Balance Network for Color Shift Correction in Single Image Dehazing via Atmospheric Light Estimation
no code implementations • 21 Jan 2021 • Cong Wang, Yan Huang, Yuexian Zou, Yong Xu
However, for images taken in the real world, the illumination is not uniformly distributed over the whole image, which brings model mismatch and possibly results in color shift of the deep models using ASM.
Image Dehazing • Single Image Dehazing
href="/paper/fwb-net-front-white-balance-network-for-color" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/fwb-net-front-white-balance-network-for-color#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/symmetry-adapted-graph-neural-networks-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2101.02930.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/symmetry-adapted-graph-neural-networks-for">Symmetry-adapted graph neural networks for constructing molecular dynamics force fields</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/symmetry-adapted-graph-neural-networks-for#code">no code implementations</a> • <span class="author-name-text item-date-pub">8 Jan 2021</span> • <span class="author-span "> <a href="/author/zun-wang">Zun Wang</a></span>, <span class="author-span "> <a href="/author/chong-wang">Chong Wang</a></span>, <span class="author-span "> <a href="/author/sibo-zhao">Sibo Zhao</a></span>, <span class="author-span "> <a href="/author/shiqiao-du">Shiqiao Du</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/bing-lin-gu">Bing-Lin Gu</a></span>, <span class="author-span "> <a href="/author/wenhui-duan">Wenhui Duan</a></span> </p> <p class="item-strip-abstract">Molecular dynamics is a powerful simulation tool to explore material properties.</p> <div class="sota"> </div> <p> <a href="/task/feature-engineering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Feature Engineering</span> </span> </a> <a href="/task/translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Translation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/symmetry-adapted-graph-neural-networks-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 
1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/symmetry-adapted-graph-neural-networks-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/semi-supervised-single-stage-controllable"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/879315.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/semi-supervised-single-stage-controllable">Semi-Supervised Single-Stage Controllable GANs for Conditional Fine-Grained Image Generation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/semi-supervised-single-stage-controllable#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/iccv-2021-1"> ICCV 2021 </a> </span> • <span class="author-span "> <a href="/author/tianyi-chen">Tianyi Chen</a></span>, <span class="author-span "> <a href="/author/yi-liu">Yi Liu</a></span>, <span class="author-span "> <a href="/author/yunfei-zhang">Yunfei Zhang</a></span>, <span class="author-span "> <a href="/author/si-wu">Si Wu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/feng-liangbing">Feng Liangbing</a></span>, <span class="author-span "> <a href="/author/hau-san-wong-2">Hau San Wong</a></span> </p> <p class="item-strip-abstract">To ensure disentanglement among the variables, we maximize mutual information between the class-independent variable and synthesized images, map real images to the latent space of a generator to perform consistency regularization of cross-class attributes, and incorporate class semantic-based regularization into a discriminator's feature space.</p> <div class="sota"> </div> <p> <a href="/task/disentanglement"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Disentanglement</span> </span> </a> <a href="/task/image-generation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/5ac09bd9-8785-4253-8cf4-4412dcd36426.jpg"> <span>Image Generation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/semi-supervised-single-stage-controllable" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/semi-supervised-single-stage-controllable#code" class="badge badge-dark badge-nocode "> <span class=" 
icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1839619 --> <div class="col-lg-3 item-image-col"> <a href="/paper/hypergraph-neural-networks-for-hypergraph"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/879329.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/hypergraph-neural-networks-for-hypergraph">Hypergraph Neural Networks for Hypergraph Matching</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/hypergraph-neural-networks-for-hypergraph#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/iccv-2021-1"> ICCV 2021 </a> </span> • <span class="author-span "> <a href="/author/xiaowei-liao">Xiaowei Liao</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/haibin-ling">Haibin Ling</a></span> </p> <p class="item-strip-abstract">Specifically, given two hypergraphs to be matched, we first construct an association hypergraph over them and convert the hypergraph matching problem into a node classification problem on the association hypergraph.</p> <div class="sota"> <p> <a href="/sota/graph-matching-on-willow-object-class"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/graph-matching-on-willow-object-class-small_53db2f72.png"/> </a> Ranked #12 on <a href="/sota/graph-matching-on-willow-object-class"> Graph Matching on Willow Object Class </a> </p> </div> <p> <a href="/task/graph-matching"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Graph Matching</span> </span> </a> <a href="/task/hypergraph-matching"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Hypergraph Matching</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/hypergraph-neural-networks-for-hypergraph#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 6</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/hypergraph-neural-networks-for-hypergraph" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 

Detection of magnetic gap in the topological surface states of MnBi2Te4
no code implementations • 31 Dec 2020 • Haoran Ji, Yanzhao Liu, He Wang, Jiawei Luo, Jiaheng Li, Hao Li, Yang Wu, Yong Xu, Jian Wang
An essential ingredient to realize these quantum states is the magnetic gap in the topological surface states induced by the out-of-plane ferromagnetism on the surface of MnBi2Te4.
Materials Science

Multi-channel Multi-frame ADL-MVDR for Target Speech Separation
no code implementations • 24 Dec 2020 • Zhuohuang Zhang, Yong Xu, Meng Yu, Shi-Xiong Zhang, LianWu Chen, Donald S. Williamson, Dong Yu
Many purely neural network based speech separation approaches have been proposed to improve objective assessment scores, but they often introduce nonlinear distortions that are harmful to modern automatic speech recognition (ASR) systems.
Automatic Speech Recognition (ASR) (+2 more)

Vehicle Re-identification Based on Dual Distance Center Loss
no code implementations • 23 Dec 2020 • Zhijun Hu, Yong Xu, Jie Wen, Lilei Sun, Raja S P
Moreover, we design a Euclidean distance threshold between all center pairs, which not only strengthens the inter-class separability of the center loss, but also makes the center loss (or DDCL) work well without the combination of softmax loss.
Person Re-Identification • Vehicle Re-Identification
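
One hedged reading of that description is a standard center loss combined with a hinge that keeps every pair of class centers at least a chosen Euclidean distance apart. The PyTorch sketch below illustrates that combination only; the threshold value, the squared hinge, and all names are assumptions rather than the paper's exact DDCL formulation.

import torch
import torch.nn as nn
import torch.nn.functional as F

class ThresholdedCenterLoss(nn.Module):
    """Center loss plus a minimum-distance constraint between class centers (sketch)."""
    def __init__(self, num_classes: int, feat_dim: int, threshold: float = 1.0):
        super().__init__()
        self.centers = nn.Parameter(torch.randn(num_classes, feat_dim))
        self.threshold = threshold

    def forward(self, feats: torch.Tensor, labels: torch.Tensor) -> torch.Tensor:
        # Pull each feature toward the center of its identity (standard center loss).
        pull = (feats - self.centers[labels]).pow(2).sum(dim=1).mean()
        # Push every pair of distinct centers at least `threshold` apart.
        dists = torch.cdist(self.centers, self.centers)
        off_diag = ~torch.eye(len(self.centers), dtype=torch.bool, device=dists.device)
        push = F.relu(self.threshold - dists[off_diag]).pow(2).mean()
        return pull + push

# Usage with features from a re-identification backbone and identity labels.
loss_fn = ThresholdedCenterLoss(num_classes=10, feat_dim=128)
loss = loss_fn(torch.randn(32, 128), torch.randint(0, 10, (32,)))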

Structural Disorder Induced Second-order Topological Insulators in Three Dimensions
no code implementations • 22 Dec 2020 • Jiong-Hao Wang, Yan-Bin Yang, Ning Dai, Yong Xu
Here we predict the existence of a second-order topological insulating phase in an amorphous system without any crystalline symmetry.
Mesoscale and Nanoscale Physics • Disordered Systems and Neural Networks
class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/structural-disorder-induced-second-order#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/directional-asr-a-new-paradigm-for-e2e-multi"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2011.00091.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/directional-asr-a-new-paradigm-for-e2e-multi">Directional ASR: A New Paradigm for E2E Multi-Speaker Speech Recognition with Source Localization</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/directional-asr-a-new-paradigm-for-e2e-multi#code">no code implementations</a> • <span class="author-name-text item-date-pub">30 Oct 2020</span> • <span class="author-span "> <a href="/author/aswin-shanmugam-subramanian">Aswin Shanmugam Subramanian</a></span>, <span class="author-span "> <a href="/author/chao-weng">Chao Weng</a></span>, <span class="author-span "> <a href="/author/shinji-watanabe">Shinji Watanabe</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">The advantages of D-ASR over existing methods are threefold: (1) it provides explicit speaker locations, (2) it improves the explainability factor, and (3) it achieves better ASR performance as the process is more streamlined.</p> <div class="sota"> </div> <p> <a href="/task/automatic-speech-recognition-2"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Automatic Speech Recognition</span> </span> </a> <a href="/task/automatic-speech-recognition"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Automatic Speech Recognition (ASR)</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/directional-asr-a-new-paradigm-for-e2e-multi#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/directional-asr-a-new-paradigm-for-e2e-multi" class="badge badge-light "> <span class=" icon-wrapper icon-ion" 
data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/directional-asr-a-new-paradigm-for-e2e-multi#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1600686 --> <div class="col-lg-3 item-image-col"> <a href="/paper/lasot-a-high-quality-large-scale-single"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2009.03465.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/lasot-a-high-quality-large-scale-single">LaSOT: A High-quality Large-scale Single Object Tracking Benchmark</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/lasot-a-high-quality-large-scale-single#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Sep 2020</span> • <span class="author-span "> <a href="/author/heng-fan">Heng Fan</a></span>, <span class="author-span "> <a href="/author/hexin-bai">Hexin Bai</a></span>, <span class="author-span "> <a href="/author/liting-lin">Liting Lin</a></span>, <span class="author-span "> <a href="/author/fan-yang">Fan Yang</a></span>, <span class="author-span "> <a href="/author/peng-chu">Peng Chu</a></span>, <span class="author-span "> <a href="/author/ge-deng">Ge Deng</a></span>, <span class="author-span "> <a href="/author/sijia-yu">Sijia Yu</a></span>, <span class="author-span "> <a href="/author/harshit">Harshit</a></span>, <span class="author-span "> <a href="/author/mingzhen-huang">Mingzhen Huang</a></span>, <span class="author-span "> <a href="/author/juehuan-liu">Juehuan Liu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/chunyuan-liao">Chunyuan Liao</a></span>, <span class="author-span "> <a href="/author/lin-yuan">Lin Yuan</a></span>, <span class="author-span "> <a href="/author/haibin-ling">Haibin Ling</a></span> </p> <p class="item-strip-abstract">The average video length of LaSOT is around 2, 500 frames, where each video contains various challenge factors that exist in real world video footage, such as the targets disappearing and re-appearing.</p> <div class="sota"> </div> <p> <a href="/task/object-tracking"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000553-467cdf5d_SvoYQZ2.jpg"> <span>Object Tracking</span> </span> </a> <a href="/task/visual-tracking"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Visual Tracking</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/lasot-a-high-quality-large-scale-single#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div 
class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 117</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/lasot-a-high-quality-large-scale-single" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/lasot-a-high-quality-large-scale-single#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1585823 --> <div class="col-lg-3 item-image-col"> <a href="/paper/an-overview-of-deep-learning-based-audio"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/pgr-0001585823-f7827e7e.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/an-overview-of-deep-learning-based-audio">An Overview of Deep-Learning-Based Audio-Visual Speech Enhancement and Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/an-overview-of-deep-learning-based-audio#code">1 code implementation</a> • <span class="author-name-text item-date-pub">21 Aug 2020</span> • <span class="author-span "> <a href="/author/daniel-michelsanti">Daniel Michelsanti</a></span>, <span class="author-span "> <a href="/author/zheng-hua-tan">Zheng-Hua Tan</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span>, <span class="author-span "> <a href="/author/jesper-jensen">Jesper 
Jensen</a></span> </p> <p class="item-strip-abstract">Speech enhancement and speech separation are two related tasks, whose purpose is to extract either one or more target speech signals, respectively, from a mixture of sounds generated by several sources.</p> <div class="sota"> </div> <p> <a href="/task/deep-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Deep Learning</span> </span> </a> <a href="/task/speech-enhancement"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/bf2407da-ded8-4af2-9537-59a43b7d3ca3.jpg"> <span>Speech Enhancement</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/an-overview-of-deep-learning-based-audio#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 205</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/an-overview-of-deep-learning-based-audio" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/an-overview-of-deep-learning-based-audio#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1628969 --> <div class="col-lg-3 item-image-col"> <a href="/paper/adl-mvdr-all-deep-learning-mvdr-beamformer"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2008.06994.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/adl-mvdr-all-deep-learning-mvdr-beamformer">ADL-MVDR: 
ADL-MVDR: All deep learning MVDR beamformer for target speech separation
1 code implementation (5 GitHub stars) • 16 Aug 2020 • Zhuohuang Zhang, Yong Xu, Meng Yu, Shi-Xiong Zhang, LianWu Chen, Dong Yu
Speech separation algorithms are often used to separate the target speech from other interfering sources.
Tasks: Speech Separation

Recurrent Exposure Generation for Low-Light Face Detection
1 code implementation (15 GitHub stars) • 21 Jul 2020 • Jinxiu Liang, Jingwen Wang, Yuhui Quan, Tianyi Chen, Jiaying Liu, Haibin Ling, Yong Xu
REG progressively and efficiently produces intermediate images corresponding to various exposure settings, and such pseudo-exposures are then fused by MED to detect faces across different lighting conditions.
Tasks: Face Detection, Image Enhancement

Designing and Training of A Dual CNN for Image Denoising
1 code implementation (47 GitHub stars) • 8 Jul 2020 • Chunwei Tian, Yong Xu, WangMeng Zuo, Bo Du, Chia-Wen Lin, David Zhang
The enhancement block gathers and fuses the global and local features to provide complementary information for the latter network.
Tasks: Image Denoising

Lightweight image super-resolution with enhanced CNN
1 code implementation (223 GitHub stars) • 8 Jul 2020 • Chunwei Tian, Ruibin Zhuge, Zhihao Wu, Yong Xu, WangMeng Zuo, Chen Chen, Chia-Wen Lin
Finally, the IRB uses coarse high-frequency features from the RB to learn more accurate SR features and construct an SR image.
Ranked #61 on Image Super-Resolution on Set14 - 4x upscaling
Tasks: Image Super-Resolution
style="margin-bottom: 20px;"> <a href="/paper/lightweight-image-super-resolution-with-2" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/lightweight-image-super-resolution-with-2#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/deep-bilateral-retinex-for-low-light-image"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2007.02018.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/deep-bilateral-retinex-for-low-light-image">Deep Bilateral Retinex for Low-Light Image Enhancement</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/deep-bilateral-retinex-for-low-light-image#code">no code implementations</a> • <span class="author-name-text item-date-pub">4 Jul 2020</span> • <span class="author-span "> <a href="/author/jinxiu-liang">Jinxiu Liang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/yuhui-quan">Yuhui Quan</a></span>, <span class="author-span "> <a href="/author/jingwen-wang">Jingwen Wang</a></span>, <span class="author-span "> <a href="/author/haibin-ling">Haibin Ling</a></span>, <span class="author-span "> <a href="/author/hui-ji">Hui Ji</a></span> </p> <p class="item-strip-abstract">Low-light images, i. e. 
the images captured in low-light conditions, suffer from very poor visibility caused by low contrast, color distortion and significant measurement noise.</p> <div class="sota"> </div> <p> <a href="/task/low-light-image-enhancement"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000098-4572d568_kd4fjWN.jpg"> <span>Low-Light Image Enhancement</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deep-bilateral-retinex-for-low-light-image" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deep-bilateral-retinex-for-low-light-image#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1413955 --> <div class="col-lg-3 item-image-col"> <a href="/paper/neural-spatio-temporal-beamformer-for-target"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2005.03889.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/neural-spatio-temporal-beamformer-for-target">Neural Spatio-Temporal Beamformer for Target Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/neural-spatio-temporal-beamformer-for-target#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 May 2020</span> • <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/lian-wu-chen">Lian-Wu Chen</a></span>, <span class="author-span "> <a href="/author/chao-weng">Chao Weng</a></span>, <span class="author-span "> <a href="/author/jianming-liu">Jianming Liu</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Purely neural network (NN) based speech separation and enhancement methods, although can achieve good objective scores, inevitably cause nonlinear speech distortions that are harmful for the automatic speech recognition (ASR).</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Audio and Speech Processing</span> <span class="badge badge-primary badge-primary-nohover">Sound</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" 
icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 15</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/neural-spatio-temporal-beamformer-for-target" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/neural-spatio-temporal-beamformer-for-target#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/pathwise-unique-solutions-and-stochastic"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2004.05305.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/pathwise-unique-solutions-and-stochastic">Pathwise unique solutions and stochastic averaging for mixed stochastic partial differential equations driven by fractional Brownian motion and Brownian motion</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/pathwise-unique-solutions-and-stochastic#code">no code implementations</a> • <span class="author-name-text item-date-pub">11 Apr 2020</span> • <span class="author-span "> <a href="/author/bin-pei">Bin Pei</a></span>, <span class="author-span "> <a href="/author/yuzuru-inahama">Yuzuru Inahama</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">This paper is devoted to a system of stochastic partial differential equations (SPDEs) that have a slow component driven by fractional Brownian motion (fBm) with the Hurst parameter $H >1/2$ and a fast component driven by fast-varying diffusion.</p> <div class="sota"> </div> <p> <span class="badge badge-primary 
badge-primary-nohover">Probability</span> <span class="badge badge-primary badge-primary-nohover">Dynamical Systems</span> <span class="badge badge-primary badge-primary-nohover">60G22, 60H05, 60H15, 34C29</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/pathwise-unique-solutions-and-stochastic" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/pathwise-unique-solutions-and-stochastic#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/multi-modal-multi-channel-target-speech"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2003.07032.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/multi-modal-multi-channel-target-speech">Multi-modal Multi-channel Target Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/multi-modal-multi-channel-target-speech#code">no code implementations</a> • <span class="author-name-text item-date-pub">16 Mar 2020</span> • <span class="author-span "> <a href="/author/rongzhi-gu">Rongzhi Gu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/lian-wu-chen">Lian-Wu Chen</a></span>, <span class="author-span "> <a href="/author/yuexian-zou-1">Yuexian Zou</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Target speech separation refers to extracting a target speaker's voice from an overlapped audio of simultaneous talkers.</p> <div class="sota"> </div> <p> <a href="/task/speech-separation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000246-5755ee34.jpg"> <span>Speech Separation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/multi-modal-multi-channel-target-speech" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 
0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/multi-modal-multi-channel-target-speech#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/enhancing-end-to-end-multi-channel-speech"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2003.03927.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/enhancing-end-to-end-multi-channel-speech">Enhancing End-to-End Multi-channel Speech Separation via Spatial Feature Learning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/enhancing-end-to-end-multi-channel-speech#code">no code implementations</a> • <span class="author-name-text item-date-pub">9 Mar 2020</span> • <span class="author-span "> <a href="/author/rongzhi-gu">Rongzhi Gu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/lian-wu-chen">Lian-Wu Chen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/dan-su">Dan Su</a></span>, <span class="author-span "> <a href="/author/yuexian-zou-1">Yuexian Zou</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Hand-crafted spatial features (e. 
g., inter-channel phase difference, IPD) play a fundamental role in recent deep learning based multi-channel speech separation (MCSS) methods.</p> <div class="sota"> </div> <p> <a href="/task/speech-separation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000246-5755ee34.jpg"> <span>Speech Separation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/enhancing-end-to-end-multi-channel-speech" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/enhancing-end-to-end-multi-channel-speech#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/self-supervised-learning-for-audio-visual"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2002.05314.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/self-supervised-learning-for-audio-visual">Self-supervised learning for audio-visual speaker diarization</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/self-supervised-learning-for-audio-visual#code">no code implementations</a> • <span class="author-name-text item-date-pub">13 Feb 2020</span> • <span class="author-span "> <a href="/author/yifan-ding">Yifan Ding</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/yahuan-cong">Yahuan Cong</a></span>, <span class="author-span "> <a href="/author/liqiang-wang">Liqiang Wang</a></span> </p> <p class="item-strip-abstract">Speaker diarization, which is to find the speech segments of specific speakers, has been widely used in human-centered applications such as video conferences or human-computer interaction systems.</p> <div class="sota"> </div> <p> <a href="/task/self-supervised-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001882-b4b42454.jpg"> <span>Self-Supervised Learning</span> </span> </a> <a href="/task/speaker-diarization-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>speaker-diarization</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/self-supervised-learning-for-audio-visual#tasks"> 
<span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/self-supervised-learning-for-audio-visual" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/self-supervised-learning-for-audio-visual#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/deep-learning-on-image-denoising-an-overview"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1912.13171.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/deep-learning-on-image-denoising-an-overview">Deep Learning on Image Denoising: An overview</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/deep-learning-on-image-denoising-an-overview#code">no code implementations</a> • <span class="author-name-text item-date-pub">31 Dec 2019</span> • <span class="author-span "> <a href="/author/chunwei-tian">Chunwei Tian</a></span>, <span class="author-span "> <a href="/author/lunke-fei">Lunke Fei</a></span>, <span class="author-span "> <a href="/author/wenxian-zheng">Wenxian Zheng</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/wangmeng-zuo">WangMeng Zuo</a></span>, <span class="author-span "> <a href="/author/chia-wen-lin">Chia-Wen Lin</a></span> </p> <p class="item-strip-abstract">However, there are substantial differences in the various types of deep learning methods dealing with image denoising.</p> <div class="sota"> </div> <p> <a href="/task/deep-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Deep Learning</span> </span> </a> <a href="/task/image-denoising"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000714-068a8901_2PQwzdm.jpg"> <span>Image Denoising</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deep-learning-on-image-denoising-an-overview" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 
512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deep-learning-on-image-denoising-an-overview#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-unified-framework-for-speech-separation"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1912.07814.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-unified-framework-for-speech-separation">A Unified Framework for Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-unified-framework-for-speech-separation#code">no code implementations</a> • <span class="author-name-text item-date-pub">17 Dec 2019</span> • <span class="author-span "> <a href="/author/fahimeh-bahmaninezhad">Fahimeh Bahmaninezhad</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/john-h-l-hansen">John H. L. 
Hansen</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">The initial solutions introduced for deep learning based speech separation analyzed the speech signals into time-frequency domain with STFT; and then encoded mixed signals were fed into a deep neural network based separator.</p> <div class="sota"> </div> <p> <a href="/task/speech-separation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000246-5755ee34.jpg"> <span>Speech Separation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-unified-framework-for-speech-separation" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-unified-framework-for-speech-separation#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/adaptive-gnn-for-image-analysis-and-editing"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/170707.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/adaptive-gnn-for-image-analysis-and-editing">Adaptive GNN for Image Analysis and Editing</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/adaptive-gnn-for-image-analysis-and-editing#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/neurips-2019-12"> NeurIPS 2019 </a> </span> • <span class="author-span "> <a href="/author/lingyu-liang">Lingyu Liang</a></span>, <span class="author-span "> <a href="/author/lianwen-jin">Lianwen Jin</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">In practical verification, we design a new regularization structure with guided feature to produce GNN-based filtering and propagation diffusion to tackle the ill-posed inverse problems of quotient image analysis (QIA), which recovers the reflectance ratio as a signature for image analysis or adjustment.</p> <div class="sota"> </div> <p> <a href="/task/graph-neural-network"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Graph Neural Network</span> </span> </a> <a href="/task/low-light-image-enhancement"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000098-4572d568_kd4fjWN.jpg"> <span>Low-Light Image Enhancement</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/adaptive-gnn-for-image-analysis-and-editing" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/adaptive-gnn-for-image-analysis-and-editing#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/audio-visual-speech-separation-and"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1909.07352.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/audio-visual-speech-separation-and">Audio-Visual Speech Separation and Dereverberation with a Two-Stage Multimodal Network</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/audio-visual-speech-separation-and#code">no code implementations</a> • <span class="author-name-text item-date-pub">16 Sep 2019</span> • <span class="author-span "> <a href="/author/ke-tan">Ke Tan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Background noise, interfering speech and room reverberation frequently distort target speech in real listening environments.</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Audio and Speech Processing</span> <span class="badge badge-primary badge-primary-nohover">Sound</span> <span class="badge badge-primary badge-primary-nohover">Signal Processing</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/audio-visual-speech-separation-and" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path 
d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/audio-visual-speech-separation-and#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2087433 --> <div class="col-lg-3 item-image-col"> <a href="/paper/image-denoising-using-deep-cnn-with-batch"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnL
gMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/image-denoising-using-deep-cnn-with-batch">Image denoising using deep CNN with batch renormalization</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/image-denoising-using-deep-cnn-with-batch#code">2 code implementations</a> • <span class="item-conference-link"> <a href="/conference/neural-networks-2019-9"> Neural Networks 2019 </a> </span> • <span class="author-span "> <a href="/author/chunwei-tian">Chunwei Tian</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/wangmeng-zuo">WangMeng Zuo</a></span> </p> <p 
class="item-strip-abstract">In this paper, we report the design of a novel network called a batch-renormalization denoising network (BRDNet).</p> <div class="sota"> </div> <p> <a href="/task/image-denoising"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000714-068a8901_2PQwzdm.jpg"> <span>Image Denoising</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 0</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/image-denoising-using-deep-cnn-with-batch" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/image-denoising-using-deep-cnn-with-batch#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/depth-agmnet-an-atrous-granular-multiscale"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1908.09346.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/depth-agmnet-an-atrous-granular-multiscale">Dedge-AGMNet:an effective stereo matching network optimized by depth edge auxiliary task</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/depth-agmnet-an-atrous-granular-multiscale#code">no code implementations</a> • <span class="author-name-text item-date-pub">25 Aug 2019</span> • <span class="author-span "> <a href="/author/weida-yang">Weida Yang</a></span>, <span class="author-span "> <a href="/author/xindong-ai">Xindong Ai</a></span>, <span 
class="author-span "> <a href="/author/zuliu-yang">Zuliu Yang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/yong-zhao">Yong Zhao</a></span> </p> <p class="item-strip-abstract">To improve the performance in ill-posed regions, this paper proposes an atrous granular multi-scale network based on depth edge subnetwork(Dedge-AGMNet).</p> <div class="sota"> </div> <p> <a href="/task/3d-architecture"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>3D Architecture</span> </span> </a> <a href="/task/disparity-estimation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Disparity Estimation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/depth-agmnet-an-atrous-granular-multiscale#tasks"> <span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/depth-agmnet-an-atrous-granular-multiscale" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/depth-agmnet-an-atrous-granular-multiscale#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/coupled-projection-residual-network-for-mri"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1907.05598.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/coupled-projection-residual-network-for-mri">Coupled-Projection Residual Network for MRI Super-Resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/coupled-projection-residual-network-for-mri#code">no code implementations</a> • <span class="author-name-text item-date-pub">12 Jul 2019</span> • <span class="author-span "> <a href="/author/chun-mei-feng">Chun-Mei Feng</a></span>, <span class="author-span "> <a href="/author/kai-wang">Kai Wang</a></span>, <span class="author-span "> <a href="/author/shijian-lu">Shijian Lu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/heng-kong">Heng Kong</a></span>, <span class="author-span "> <a href="/author/ling-shao">Ling Shao</a></span> </p> <p class="item-strip-abstract">The deep sub-network learns from the residuals of the high-frequency image 
information, where multiple residual blocks are cascaded to magnify the MRI images at the last network layer.</p> <div class="sota"> </div> <p> <a href="/task/super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000032-0f0cf3b2.jpg"> <span>Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/coupled-projection-residual-network-for-mri" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/coupled-projection-residual-network-for-mri#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 499950 --> <div class="col-lg-3 item-image-col"> <a href="/paper/single-channel-signal-separation-and"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1906.07552.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/single-channel-signal-separation-and">Single-Channel Signal Separation and Deconvolution with Generative Adversarial Networks</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/single-channel-signal-separation-and#code">1 code implementation</a> • <span class="author-name-text item-date-pub">14 Jun 2019</span> • <span class="author-span "> <a href="/author/qiuqiang-kong">Qiuqiang Kong</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/philip-j-b-jackson">Philip J. B. Jackson</a></span>, <span class="author-span "> <a href="/author/mark-d-plumbley">Mark D. 
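As a rough illustration of the pattern the abstract describes, a cascade of residual blocks with the magnification applied only at the final layer, here is a minimal PyTorch sketch; the channel width, block count and PixelShuffle upsampler are assumptions for illustration, not the paper's actual architecture.

```python
import torch
import torch.nn as nn

class ResidualBlock(nn.Module):
    """Plain conv-ReLU-conv block with an identity skip connection."""
    def __init__(self, channels):
        super().__init__()
        self.body = nn.Sequential(
            nn.Conv2d(channels, channels, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(channels, channels, 3, padding=1),
        )

    def forward(self, x):
        return x + self.body(x)

class CascadedResidualSR(nn.Module):
    """Cascade of residual blocks; the image is only magnified at the end."""
    def __init__(self, channels=64, num_blocks=8, scale=2):
        super().__init__()
        self.head = nn.Conv2d(1, channels, 3, padding=1)   # single-channel MRI slice in
        self.blocks = nn.Sequential(*[ResidualBlock(channels) for _ in range(num_blocks)])
        self.upsample = nn.Sequential(                      # magnification at the last layer
            nn.Conv2d(channels, channels * scale ** 2, 3, padding=1),
            nn.PixelShuffle(scale),
            nn.Conv2d(channels, 1, 3, padding=1),
        )

    def forward(self, lr_image):
        feats = self.head(lr_image)
        feats = feats + self.blocks(feats)   # residual learning over the cascade
        return self.upsample(feats)

# Example: upscale a 1x1x64x64 low-resolution slice to 128x128.
sr = CascadedResidualSR()(torch.randn(1, 1, 64, 64))
```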
Single-Channel Signal Separation and Deconvolution with Generative Adversarial Networks
1 code implementation (12 stars) • 14 Jun 2019 • Qiuqiang Kong, Yong Xu, Wenwu Wang, Philip J. B. Jackson, Mark D. Plumbley
Single-channel signal separation and deconvolution aims to separate and deconvolve individual sources from a single-channel mixture and is a challenging problem in which no prior knowledge of the mixing filters is available.
Tasks: Generative Adversarial Network, Image Inpainting

Robust Classification with Sparse Representation Fusion on Diverse Data Subsets
no code implementations • 10 Jun 2019 • Chun-Mei Feng, Yong Xu, Zuoyong Li, Jian Yang
It performs Sparse Representation Fusion based on the Diverse Subset of training samples (SRFDS), which reduces the impact of randomness of the sample set and enhances the robustness of classification results.
Tasks: General Classification, Robust classification

Supervised Discriminative Sparse PCA for Com-Characteristic Gene Selection and Tumor Classification on Multiview Biological Data
no code implementations • 28 May 2019 • Chun-Mei Feng, Yong Xu, Jin-Xing Liu, Ying-Lian Gao, Chun-Hou Zheng
To overcome this problem, this study developed a new PCA method, which is named the Supervised Discriminative Sparse PCA (SDSPCA).
Tasks: General Classification

A comprehensive study of speech separation: spectrogram vs waveform separation
no code implementations • 17 May 2019 • Fahimeh Bahmaninezhad, Jian Wu, Rongzhi Gu, Shi-Xiong Zhang, Yong Xu, Meng Yu, Dong Yu
We study the speech separation problem for far-field data (more similar to naturalistic audio streams) and develop multi-channel solutions for both frequency- and time-domain separators, utilizing spectral, spatial and speaker location information.
Tasks: Speech Recognition (+1 more)
href="/task/speech-recognition"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000646-bb6f0daf.jpg"> <span>Speech Recognition</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/a-comprehensive-study-of-speech-separation#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-comprehensive-study-of-speech-separation" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-comprehensive-study-of-speech-separation#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/end-to-end-multi-channel-speech-separation"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1905.06286.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/end-to-end-multi-channel-speech-separation">End-to-End Multi-Channel Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/end-to-end-multi-channel-speech-separation#code">no code implementations</a> • <span class="author-name-text item-date-pub">15 May 2019</span> • <span class="author-span "> <a href="/author/rongzhi-gu">Rongzhi Gu</a></span>, <span class="author-span "> <a href="/author/jian-wu">Jian Wu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/lian-wu-chen">Lian-Wu Chen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/dan-su">Dan Su</a></span>, <span class="author-span "> <a href="/author/yuexian-zou-1">Yuexian Zou</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">This paper extended the previous approach and proposed a new end-to-end model for multi-channel speech separation.</p> <div class="sota"> </div> <p> <a href="/task/speech-separation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000246-5755ee34.jpg"> <span>Speech Separation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" 
style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/end-to-end-multi-channel-speech-separation" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/end-to-end-multi-channel-speech-separation#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/time-domain-audio-visual-speech-separation"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1904.03760.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/time-domain-audio-visual-speech-separation">Time Domain Audio Visual Speech Separation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/time-domain-audio-visual-speech-separation#code">no code implementations</a> • <span class="author-name-text item-date-pub">7 Apr 2019</span> • <span class="author-span "> <a href="/author/jian-wu">Jian Wu</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/shi-xiong-zhang">Shi-Xiong Zhang</a></span>, <span class="author-span "> <a href="/author/lian-wu-chen">Lian-Wu Chen</a></span>, <span class="author-span "> <a href="/author/meng-yu">Meng Yu</a></span>, <span class="author-span "> <a href="/author/lei-xie">Lei Xie</a></span>, <span class="author-span "> <a href="/author/dong-yu">Dong Yu</a></span> </p> <p class="item-strip-abstract">Audio-visual multi-modal modeling has been demonstrated to be effective in many speech related tasks, such as speech recognition and speech enhancement.</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Audio and Speech Processing</span> <span class="badge badge-primary badge-primary-nohover">Sound</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/time-domain-audio-visual-speech-separation" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a 
href="/paper/time-domain-audio-visual-speech-separation#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/image-cartoon-texture-decomposition-using"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1811.04208.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/image-cartoon-texture-decomposition-using">Image Cartoon-Texture Decomposition Using Isotropic Patch Recurrence</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/image-cartoon-texture-decomposition-using#code">no code implementations</a> • <span class="author-name-text item-date-pub">10 Nov 2018</span> • <span class="author-span "> <a href="/author/ruotao-xu">Ruotao Xu</a></span>, <span class="author-span "> <a href="/author/yuhui-quan">Yuhui Quan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span> </p> <p class="item-strip-abstract">Aiming at separating the cartoon and texture layers from an image, cartoon-texture decomposition approaches resort to image priors to model cartoon and texture respectively.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/image-cartoon-texture-decomposition-using" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/image-cartoon-texture-decomposition-using#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/enhanced-cnn-for-image-denoising"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1810.11834.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/enhanced-cnn-for-image-denoising">Enhanced CNN for image denoising</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/enhanced-cnn-for-image-denoising#code">no code implementations</a> • <span class="author-name-text item-date-pub">28 Oct 
Enhanced CNN for image denoising
no code implementations • 28 Oct 2018 • Chunwei Tian, Yong Xu, Lunke Fei, Junqian Wang, Jie Wen, Nan Luo
Owing to flexible architectures of deep convolutional neural networks (CNNs), CNNs are successfully used for image denoising.
Tasks: Image Denoising

Deep Learning for Image Denoising: A Survey
no code implementations • 11 Oct 2018 • Chunwei Tian, Yong Xu, Lunke Fei, Ke Yan
Since the proposal of big data analysis and the Graphic Processing Unit (GPU), deep learning technology has received a great deal of attention and has been widely applied in the field of image processing.
Tasks: BIG-bench Machine Learning, Deep Learning (+2 more)
src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000553-467cdf5d_SvoYQZ2.jpg"> <span>Object Tracking</span> </span> </a> <a href="/task/high"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Vocal Bursts Intensity Prediction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 117</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/lasot-a-high-quality-benchmark-for-large" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/lasot-a-high-quality-benchmark-for-large#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/highly-economized-multi-view-binary"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1809.05992.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/highly-economized-multi-view-binary">Highly-Economized Multi-View Binary Compression for Scalable Image Clustering</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/highly-economized-multi-view-binary#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/eccv-2018-9"> ECCV 2018 </a> </span> • <span class="author-span "> <a href="/author/zheng-zhang">Zheng Zhang</a></span>, <span class="author-span "> <a href="/author/li-liu">Li Liu</a></span>, <span class="author-span "> <a href="/author/jie-qin">Jie Qin</a></span>, 
<span class="author-span "> <a href="/author/fan-zhu">Fan Zhu</a></span>, <span class="author-span "> <a href="/author/fumin-shen">Fumin Shen</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/ling-shao">Ling Shao</a></span>, <span class="author-span "> <a href="/author/heng-tao-shen">Heng Tao Shen</a></span> </p> <p class="item-strip-abstract">How to economically cluster large-scale multi-view images is a long-standing problem in computer vision.</p> <div class="sota"> </div> <p> <a href="/task/clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001594-3ce5d6d8.jpg"> <span>Clustering</span> </span> </a> <a href="/task/image-clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000702-3c2f553a.jpg"> <span>Image Clustering</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/highly-economized-multi-view-binary#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/highly-economized-multi-view-binary" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/highly-economized-multi-view-binary#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/incomplete-multi-view-clustering-via-graph"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1809.05998.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/incomplete-multi-view-clustering-via-graph">Incomplete Multi-view Clustering via Graph Regularized Matrix Factorization</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/incomplete-multi-view-clustering-via-graph#code">no code implementations</a> • <span class="author-name-text item-date-pub">17 Sep 2018</span> • <span class="author-span "> <a href="/author/jie-wen">Jie Wen</a></span>, <span class="author-span "> <a href="/author/zheng-zhang">Zheng Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/zuofeng-zhong">Zuofeng Zhong</a></span> </p> <p class="item-strip-abstract">Clustering with incomplete views is a challenge in multi-view clustering.</p> <div 
class="sota"> </div> <p> <a href="/task/clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001594-3ce5d6d8.jpg"> <span>Clustering</span> </span> </a> <a href="/task/graph-embedding"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/4867272e-8232-4786-b735-41b4dccb5adf.jpg"> <span>Graph Embedding</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/incomplete-multi-view-clustering-via-graph#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/incomplete-multi-view-clustering-via-graph" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/incomplete-multi-view-clustering-via-graph#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 163667 --> <div class="col-lg-3 item-image-col"> <a href="/paper/sound-event-detection-and-time-frequency"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/pgr-0000163667-a88c7ac9.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/sound-event-detection-and-time-frequency">Sound Event Detection and Time-Frequency Segmentation from Weakly Labelled Data</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/sound-event-detection-and-time-frequency#code">2 code implementations</a> • <span class="author-name-text item-date-pub">12 Apr 2018</span> • <span class="author-span "> <a href="/author/qiuqiang-kong">Qiuqiang Kong</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/iwona-sobieraj">Iwona Sobieraj</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/mark-d-plumbley">Mark D. 
Plumbley</a></span> </p> <p class="item-strip-abstract">Sound event detection (SED) aims to detect when and recognize what sound events happen in an audio clip.</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Sound</span> <span class="badge badge-primary badge-primary-nohover">Audio and Speech Processing</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 45</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/sound-event-detection-and-time-frequency" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/sound-event-detection-and-time-frequency#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1471289 --> <div class="col-lg-3 item-image-col"> <a href="/paper/bidirectional-attentive-fusion-with-context"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/pgr-0001471289-9dcf20b3.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/bidirectional-attentive-fusion-with-context">Bidirectional Attentive Fusion with Context Gating for Dense Video Captioning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/bidirectional-attentive-fusion-with-context#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2018-6"> CVPR 2018 </a> </span> • <span class="author-span "> <a href="/author/jingwen-wang">Jingwen Wang</a></span>, <span class="author-span "> <a href="/author/wenhao-jiang">Wenhao 
Bidirectional Attentive Fusion with Context Gating for Dense Video Captioning
1 code implementation (149 stars) • CVPR 2018 • Jingwen Wang, Wenhao Jiang, Lin Ma, Wei Liu, Yong Xu
We propose a bidirectional proposal method that effectively exploits both past and future contexts to make proposal predictions.
Tasks: Decoder, Dense Video Captioning
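The "past and future contexts" idea can be illustrated with a bidirectional recurrent encoder whose per-step states summarize everything before and after that step, feeding a proposal-confidence head. The PyTorch sketch below is only a schematic of that idea; the feature dimension, anchor count and plain linear scorer are assumptions, not the paper's attentive-fusion or context-gating modules.

```python
import torch
import torch.nn as nn

class BidirectionalProposalScorer(nn.Module):
    """Score candidate event proposals at each time step using past and future context."""
    def __init__(self, feat_dim=500, hidden=256, num_anchors=4):
        super().__init__()
        # A bidirectional GRU gives every time step a summary of what came
        # before (forward pass) and what comes after (backward pass).
        self.rnn = nn.GRU(feat_dim, hidden, batch_first=True, bidirectional=True)
        self.score = nn.Linear(2 * hidden, num_anchors)  # one score per anchor length

    def forward(self, clip_features):
        # clip_features: (batch, time, feat_dim) frame/clip-level features.
        context, _ = self.rnn(clip_features)               # (batch, time, 2*hidden)
        return torch.sigmoid(self.score(context))          # proposal confidence per step/anchor

scores = BidirectionalProposalScorer()(torch.randn(2, 120, 500))
print(scores.shape)  # torch.Size([2, 120, 4])
```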
A joint separation-classification model for sound event detection of weakly labelled data
2 code implementations (37 stars) • 8 Nov 2017 • Qiuqiang Kong, Yong Xu, Wenwu Wang, Mark D. Plumbley
First, we propose a separation mapping from the time-frequency (T-F) representation of an audio clip to the T-F segmentation masks of the audio events.
Categories: Sound, Audio and Speech Processing
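A hedged sketch of the separation-mapping idea: a small convolutional network predicts one sigmoid mask per event class over the T-F plane, and the masks are pooled into clip-level probabilities so the model can be trained from weak (clip-level) labels. The architecture and the simple average pooling below are placeholders, not the configuration used in the paper.

```python
import torch
import torch.nn as nn

class JointSeparationClassification(nn.Module):
    """Map a spectrogram to per-event T-F masks, then pool them into clip-level labels."""
    def __init__(self, num_events=10):
        super().__init__()
        # Separation mapping: one sigmoid mask over the T-F plane per event class.
        self.mask_net = nn.Sequential(
            nn.Conv2d(1, 32, 3, padding=1), nn.ReLU(inplace=True),
            nn.Conv2d(32, num_events, 3, padding=1), nn.Sigmoid(),
        )

    def forward(self, spectrogram):
        # spectrogram: (batch, 1, time, mel_bins)
        masks = self.mask_net(spectrogram)                # (batch, events, time, mel_bins)
        # Weak supervision: average each mask over the T-F plane to get a
        # clip-level presence probability trainable against clip labels.
        clip_prob = masks.mean(dim=(2, 3)).clamp(1e-7, 1 - 1e-7)
        return masks, clip_prob

masks, probs = JointSeparationClassification()(torch.randn(4, 1, 240, 64))
```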
Audio Set classification with attention model: A probabilistic perspective
5 code implementations • 2 Nov 2017 • Qiuqiang Kong, Yong Xu, Wenwu Wang, Mark D. Plumbley
Then the classification of a bag is the expectation of the classification output of the instances in the bag with respect to the learned probability measure.
Tasks: Sound, Audio and Speech Processing • Stars: 153 • Paper: /paper/audio-set-classification-with-attention-model
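The "expectation with respect to a learned probability measure" can be written as attention pooling over the instances of a bag. Below is a minimal sketch under stated assumptions (PyTorch, a 128-dimensional instance feature, 10 instances per clip; only the 527-class count is taken from Audio Set itself), not the paper's code.

```python
import torch
import torch.nn as nn

class AttentionPooling(nn.Module):
    """Bag-level prediction as the expectation of instance-level predictions
    under a learned, normalized attention distribution over instances."""
    def __init__(self, feat_dim: int, n_classes: int):
        super().__init__()
        self.cla = nn.Linear(feat_dim, n_classes)   # instance-level classifier
        self.att = nn.Linear(feat_dim, n_classes)   # unnormalized attention scores

    def forward(self, h):                           # h: (batch, instances, feat_dim)
        p = torch.sigmoid(self.cla(h))              # instance probabilities
        w = torch.softmax(self.att(h), dim=1)       # probability measure over instances
        return (w * p).sum(dim=1)                   # bag-level expectation: (batch, n_classes)

pool = AttentionPooling(feat_dim=128, n_classes=527)   # 527 = number of Audio Set classes
bag_prob = pool(torch.randn(8, 10, 128))               # 10 instances (segments) per clip
```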
">
Large-scale weakly supervised audio classification using gated convolutional neural network
3 code implementations • 1 Oct 2017 • Yong Xu, Qiuqiang Kong, Wenwu Wang, Mark D. Plumbley
In this paper, we present a gated convolutional neural network and a temporal attention-based localization method for audio classification, which won the 1st place in the large-scale weakly supervised sound event detection task of Detection and Classification of Acoustic Scenes and Events (DCASE) 2017 challenge.
Tasks: Sound, Audio and Speech Processing • Stars: 9 • Paper: /paper/large-scale-weakly-supervised-audio
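">
The gating mechanism named in the title is, at its core, a gated linear unit applied to convolutional feature maps. Here is a minimal sketch of one such block, assuming PyTorch, 3x3 kernels, and a 240x64 log-mel input; these details are illustrative, not taken from the paper.

```python
import torch
import torch.nn as nn

class GatedConvBlock(nn.Module):
    """Convolution with a gated linear unit (GLU): one branch provides features,
    a sigmoid branch gates them, acting as a learnable attention over T-F units."""
    def __init__(self, in_ch: int, out_ch: int):
        super().__init__()
        self.feat = nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1)
        self.gate = nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1)

    def forward(self, x):                            # x: (batch, ch, frames, bins)
        return self.feat(x) * torch.sigmoid(self.gate(x))

block = GatedConvBlock(1, 64)
out = block(torch.randn(4, 1, 240, 64))              # e.g. 240-frame, 64-bin log-mel input
```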
Discriminative Block-Diagonal Representation Learning for Image Recognition
no code implementations • 12 Jul 2017 • Zheng Zhang, Yong Xu, Ling Shao, Jian Yang
In particular, the elaborate BDLRR is formulated as a joint optimization problem of shrinking the unfavorable representation from off-block-diagonal elements and strengthening the compact block-diagonal representation under the semi-supervised framework of low-rank representation.
Tasks: Representation Learning • Paper: /paper/discriminative-block-diagonal-representation

Mind the Class Weight Bias: Weighted Maximum Mean Discrepancy for Unsupervised Domain Adaptation
3 code implementations • CVPR 2017 • Hongliang Yan, Yukang Ding, Peihua Li, Qilong Wang, Yong Xu, WangMeng Zuo
Specifically, we introduce class-specific auxiliary weights into the original MMD for exploiting the class prior probability on source and target domains, whose challenge lies in the fact that the class label in target domain is unavailable.
Tasks: Unsupervised Domain Adaptation • Stars: 47 • Paper: /paper/mind-the-class-weight-bias-weighted-maximum
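">
One way to read the class-specific weighting is as a re-weighted MMD in which each source sample is scaled by the ratio of target to source class priors, so that differing class proportions no longer dominate the discrepancy. The NumPy sketch below illustrates this reading under stated assumptions (an RBF kernel, a known target prior passed in directly); it is not the paper's formulation or code, and in practice the target prior would have to be estimated, e.g. from pseudo-labels, since target labels are unavailable.

```python
import numpy as np

def rbf(a, b, gamma=1.0):
    """RBF kernel matrix between two sample sets."""
    d = ((a[:, None, :] - b[None, :, :]) ** 2).sum(-1)
    return np.exp(-gamma * d)

def weighted_mmd2(xs, ys_onehot, xt, target_prior, gamma=1.0):
    """Squared MMD between a class-re-weighted source set and the target set.
    Each source sample of class c gets weight target_prior[c] / source_prior[c]."""
    source_prior = ys_onehot.mean(axis=0)                 # empirical source class priors
    w = ys_onehot @ (target_prior / source_prior)         # per-sample auxiliary weight
    w = w / w.sum()
    v = np.full(len(xt), 1.0 / len(xt))
    return (w @ rbf(xs, xs, gamma) @ w
            - 2.0 * w @ rbf(xs, xt, gamma) @ v
            + v @ rbf(xt, xt, gamma) @ v)

xs, xt = np.random.randn(100, 16), np.random.randn(80, 16)
ys = np.eye(3)[np.random.randint(0, 3, 100)]              # one-hot source labels
print(weighted_mmd2(xs, ys, xt, target_prior=np.array([0.2, 0.3, 0.5])))
```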
Learning Inverse Mapping by Autoencoder based Generative Adversarial Nets
no code implementations • 29 Mar 2017 • Junyu Luo, Yong Xu, Chenwei Tang, Jiancheng Lv
The inverse mapping of GANs' (Generative Adversarial Nets) generator has a great potential value. Hence, some works have been developed to construct the inverse function of generator by directly learning or adversarial learning. While the results are encouraging, the problem is highly challenging and the existing ways of training inverse models of GANs have many disadvantages, such as hard to train or poor performance. Due to these reasons, we propose a new approach based on using inverse generator ($IG$) model as encoder and pre-trained generator ($G$) as decoder of an AutoEncoder network to train the $IG$ model.
Tasks: Decoder • Paper: /paper/learning-inverse-mapping-by-autoencoder-based
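The training setup described above, IG as encoder and a frozen pre-trained G as decoder, can be sketched in a few lines. The PyTorch code below is only an illustration: the MLP generator, latent and image dimensions, optimizer settings, and the plain reconstruction loss are assumptions, and a real G would be an actual pre-trained GAN generator.

```python
import torch
import torch.nn as nn

latent_dim, img_dim = 100, 784                           # illustrative sizes
# Stand-in for a pre-trained generator; kept fixed and used as the decoder.
G = nn.Sequential(nn.Linear(latent_dim, 256), nn.ReLU(),
                  nn.Linear(256, img_dim), nn.Tanh())
G.requires_grad_(False)

# Inverse generator (encoder) to be trained.
IG = nn.Sequential(nn.Linear(img_dim, 256), nn.ReLU(),
                   nn.Linear(256, latent_dim))
opt = torch.optim.Adam(IG.parameters(), lr=1e-4)

for step in range(3):                                    # a few illustrative steps
    z = torch.randn(64, latent_dim)
    x = G(z)                                             # samples from the generator
    x_rec = G(IG(x))                                     # encode with IG, decode with frozen G
    loss = ((x_rec - x) ** 2).mean()                     # autoencoder reconstruction objective
    opt.zero_grad(); loss.backward(); opt.step()
```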
Multi-Objective Learning and Mask-Based Post-Processing for Deep Neural Network Based Speech Enhancement
no code implementations • 21 Mar 2017 • Yong Xu, Jun Du, Zhen Huang, Li-Rong Dai, Chin-Hui Lee
We propose a multi-objective framework to learn both secondary targets not directly related to the intended task of speech enhancement (SE) and the primary target of the clean log-power spectra (LPS) features to be used directly for constructing the enhanced speech signals.
Tasks: Sound • Paper: /paper/multi-objective-learning-and-mask-based-post
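In practice, multi-objective learning of this kind amounts to one enhancement network with a primary output head and one or more secondary heads trained with a combined loss. The PyTorch sketch below assumes a 257-bin spectrum, an ideal-ratio-mask-like secondary target, and a 0.1 weighting; these choices are illustrative and not taken from the paper.

```python
import torch
import torch.nn as nn

class MultiObjectiveSE(nn.Module):
    """Enhancement DNN with a primary head (clean LPS) and a secondary head
    (a T-F mask target here); both losses are combined during training."""
    def __init__(self, dim: int = 257):
        super().__init__()
        self.backbone = nn.Sequential(nn.Linear(dim, 1024), nn.ReLU(),
                                      nn.Linear(1024, 1024), nn.ReLU())
        self.lps_head = nn.Linear(1024, dim)      # primary: clean log-power spectra
        self.mask_head = nn.Linear(1024, dim)     # secondary: T-F mask target

    def forward(self, noisy_lps):
        h = self.backbone(noisy_lps)
        return self.lps_head(h), torch.sigmoid(self.mask_head(h))

net = MultiObjectiveSE()
noisy = torch.randn(32, 257)
lps_pred, mask_pred = net(noisy)
clean_lps, irm = torch.randn(32, 257), torch.rand(32, 257)     # dummy training targets
loss = (nn.functional.mse_loss(lps_pred, clean_lps)
        + 0.1 * nn.functional.mse_loss(mask_pred, irm))        # weighted multi-objective loss
```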
href="/author/qiuqiang-kong">Qiuqiang Kong</a></span>, <span class="author-span "> <a href="/author/qiang-huang">Qiang Huang</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/mark-d-plumbley">Mark D. Plumbley</a></span> </p> <p class="item-strip-abstract">Audio tagging aims to perform multi-label classification on audio chunks and it is a newly proposed task in the Detection and Classification of Acoustic Scenes and Events 2016 (DCASE 2016) challenge.</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Sound</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 1</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/attention-and-localization-based-on-a-deep" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/attention-and-localization-based-on-a-deep#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 8011 --> <div class="col-lg-3 item-image-col"> <a href="/paper/convolutional-gated-recurrent-neural-network"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1702.07787.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/convolutional-gated-recurrent-neural-network">Convolutional Gated Recurrent Neural Network Incorporating Spatial Features for Audio Tagging</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/convolutional-gated-recurrent-neural-network#code">2 
code implementations</a> • <span class="author-name-text item-date-pub">24 Feb 2017</span> • <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/qiuqiang-kong">Qiuqiang Kong</a></span>, <span class="author-span "> <a href="/author/qiang-huang">Qiang Huang</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/mark-d-plumbley">Mark D. Plumbley</a></span> </p> <p class="item-strip-abstract">In this paper, we propose to use a convolutional neural network (CNN) to extract robust features from mel-filter banks (MFBs), spectrograms or even raw waveforms for audio tagging.</p> <div class="sota"> </div> <p> <a href="/task/audio-tagging"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Audio Tagging</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 7</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/convolutional-gated-recurrent-neural-network" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/convolutional-gated-recurrent-neural-network#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 93560 --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-joint-detection-classification-model-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1610.01797.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div 
class="col-lg-9 item-content"> <h1><a href="/paper/a-joint-detection-classification-model-for">A Joint Detection-Classification Model for Audio Tagging of Weakly Labelled Data</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-joint-detection-classification-model-for#code">1 code implementation</a> • <span class="author-name-text item-date-pub">6 Oct 2016</span> • <span class="author-span "> <a href="/author/qiuqiang-kong">Qiuqiang Kong</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/mark-plumbley">Mark Plumbley</a></span> </p> <p class="item-strip-abstract">The labeling of an audio clip is often based on the audio events in the clip and no event level label is provided to the user.</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Sound</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 6</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-joint-detection-classification-model-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-joint-detection-classification-model-for#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/hierarchical-learning-for-dnn-based-acoustic"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1607.03682.jpg');"> </div> </a> </div> <div class="col-lg-9 
item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/hierarchical-learning-for-dnn-based-acoustic">Hierarchical learning for DNN-based acoustic scene classification</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/hierarchical-learning-for-dnn-based-acoustic#code">no code implementations</a> • <span class="author-name-text item-date-pub">13 Jul 2016</span> • <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/qiang-huang">Qiang Huang</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/mark-d-plumbley">Mark D. Plumbley</a></span> </p> <p class="item-strip-abstract">In this paper, we present a deep neural network (DNN)-based acoustic scene classification framework.</p> <div class="sota"> </div> <p> <a href="/task/acoustic-scene-classification"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000414-7ab98c7f.jpg"> <span>Acoustic Scene Classification</span> </span> </a> <a href="/task/classification-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/d0eafcb3-1a12-430b-8bb5-6f6bbff1a4b3.jpg"> <span>Classification</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/hierarchical-learning-for-dnn-based-acoustic#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/hierarchical-learning-for-dnn-based-acoustic" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/hierarchical-learning-for-dnn-based-acoustic#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 214051 --> <div class="col-lg-3 item-image-col"> <a href="/paper/unsupervised-feature-learning-based-on-deep"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1607.03681.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/unsupervised-feature-learning-based-on-deep">Unsupervised Feature Learning Based on Deep Models for Environmental Audio Tagging</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/unsupervised-feature-learning-based-on-deep#code">2 code implementations</a> • <span class="author-name-text item-date-pub">13 Jul 2016</span> • <span 
class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/qiang-huang">Qiang Huang</a></span>, <span class="author-span "> <a href="/author/wenwu-wang">Wenwu Wang</a></span>, <span class="author-span "> <a href="/author/peter-foster">Peter Foster</a></span>, <span class="author-span "> <a href="/author/siddharth-sigtia">Siddharth Sigtia</a></span>, <span class="author-span "> <a href="/author/philip-j-b-jackson">Philip J. B. Jackson</a></span>, <span class="author-span "> <a href="/author/mark-d-plumbley">Mark D. Plumbley</a></span> </p> <p class="item-strip-abstract">For the unsupervised feature learning, we propose to use a symmetric or asymmetric deep de-noising auto-encoder (sDAE or aDAE) to generate new data-driven features from the Mel-Filter Banks (MFBs) features.</p> <div class="sota"> </div> <p> <a href="/task/audio-tagging"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Audio Tagging</span> </span> </a> <a href="/task/classification"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001592-811f0118_3TU7fCb.jpg"> <span>General Classification</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/unsupervised-feature-learning-based-on-deep#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 1</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/unsupervised-feature-learning-based-on-deep" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/unsupervised-feature-learning-based-on-deep#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 
Lecture bilingue augmentée par des alignements multi-niveaux (Augmenting bilingual reading with alignment information)
no code implementations • JEPTALNRECITAL 2016 • François Yvon, Yong Xu, Marianna Apidianaki, Clément Pillias, Cubaud Pierre
The work that led to this demonstration combines multilingual language-processing tools, in particular automatic alignment, with visualization and interaction techniques.
Paper: /paper/lecture-bilingue-augment-ee-par-des

Fully DNN-based Multi-label regression for audio tagging
no code implementations • 24 Jun 2016 • Yong Xu, Qiang Huang, Wenwu Wang, Philip J. B. Jackson, Mark D. Plumbley
Compared with the conventional Gaussian Mixture Model (GMM) and support vector machine (SVM) methods, the proposed fully DNN-based method could well utilize the long-term temporal information with the whole chunk as the input.
Tasks: Audio Tagging, Event Detection, +5 more • Paper: /paper/fully-dnn-based-multi-label-regression-for

Natural Scene Character Recognition Using Robust PCA and Sparse Representation
no code implementations • 15 Jun 2016 • Zheng Zhang, Yong Xu, Cheng-Lin Liu
Natural scene character recognition is challenging due to the cluttered background, which is hard to separate from text.
Paper: /paper/natural-scene-character-recognition-using

TransRead: Designing a Bilingual Reading Experience with Machine Translation Technologies
no code implementations • NAACL 2016 • François Yvon, Yong Xu, Marianna Apidianaki, Clément Pillias, Pierre Cubaud
Tasks: Machine Translation, Translation • Paper: /paper/transread-designing-a-bilingual-reading

Sparse Coding for Classification via Discrimination Ensemble
no code implementations • CVPR 2016 • Yuhui Quan, Yong Xu, Yuping Sun, Yan Huang, Hui Ji
Discriminative sparse coding has emerged as a promising technique in image analysis and recognition, which couples the process of classifier training and the process of dictionary learning for improving the discriminability of sparse codes.
Tasks: Classification, Dictionary Learning, +1 more • Paper: /paper/sparse-coding-for-classification-via

Novel elicitation and annotation schemes for sentential and sub-sentential alignments of bitexts
no code implementations • LREC 2016 • Yong Xu, François Yvon
Resources for evaluating sentence-level and word-level alignment algorithms are unsatisfactory.
Tasks: Sentence • Paper: /paper/novel-elicitation-and-annotation-schemes-for

A survey of sparse representation: algorithms and applications
no code implementations • 23 Feb 2016 • Zheng Zhang, Yong Xu, Jian Yang, Xuelong Li, David Zhang
The main purpose of this article is to provide a comprehensive study and an updated review on sparse representation and to supply a guidance for researchers.
Tasks: Survey • Paper: /paper/a-survey-of-sparse-representation-algorithms

Removing Rain From a Single Image via Discriminative Sparse Coding
no code implementations • ICCV 2015 • Yu Luo, Yong Xu, Hui Ji
The paper aims at developing an effective algorithm to remove visual effects of rain from a single rainy image, i. e.
separate the rain layer and the de-rained image layer from an rainy image.</p> <div class="sota"> </div> <p> <a href="/task/dictionary-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Dictionary Learning</span> </span> </a> <a href="/task/rain-removal"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Rain Removal</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/removing-rain-from-a-single-image-via" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/removing-rain-from-a-single-image-via#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/lacunarity-analysis-on-image-patterns-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/88976.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/lacunarity-analysis-on-image-patterns-for">Lacunarity Analysis on Image Patterns for Texture Classification</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/lacunarity-analysis-on-image-patterns-for#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2014-6"> CVPR 2014 </a> </span> • <span class="author-span "> <a href="/author/yuhui-quan">Yuhui Quan</a></span>, <span class="author-span author-matched"> <a href="/author/yong-xu">Yong Xu</a></span>, <span class="author-span "> <a href="/author/yuping-sun">Yuping Sun</a></span>, <span class="author-span "> <a href="/author/yu-luo">Yu Luo</a></span> </p> <p class="item-strip-abstract">Based on the concept of lacunarity in fractal geometry, we developed a statistical approach to texture description, which yields highly discriminative feature with strong robustness to a wide range of transformations, including photometric changes and geometric changes.</p> <div class="sota"> </div> <p> <a href="/task/classification-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/d0eafcb3-1a12-430b-8bb5-6f6bbff1a4b3.jpg"> <span>Classification</span> </span> </a> <a href="/task/classification"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001592-811f0118_3TU7fCb.jpg"> <span>General Classification</span> </span> </a> <a 
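Several of the entries above (the sparse representation survey, the discrimination-ensemble classifier, and the rain-removal paper) build on sparse coding over a dictionary. As a rough illustration of that shared building block, and not the specific method of any paper listed here, the sketch below solves the standard l1-regularized sparse coding problem with ISTA in numpy; the dictionary, problem sizes, regularization weight, and step-size choice are all illustrative assumptions.

```python
# Minimal sparse coding sketch (illustrative only; not the method of any paper above).
# Solves  min_z  0.5 * ||x - D z||^2 + lam * ||z||_1  with ISTA over a fixed dictionary D.
import numpy as np

def soft_threshold(v, t):
    # Element-wise soft-thresholding, the proximal operator of the l1 norm.
    return np.sign(v) * np.maximum(np.abs(v) - t, 0.0)

def ista_sparse_code(x, D, lam=0.1, n_iter=200):
    # Returns a sparse code z such that D @ z approximates x.
    L = np.linalg.norm(D, ord=2) ** 2          # Lipschitz constant of the smooth term
    z = np.zeros(D.shape[1])
    for _ in range(n_iter):
        grad = D.T @ (D @ z - x)               # gradient of 0.5 * ||x - D z||^2
        z = soft_threshold(z - grad / L, lam / L)
    return z

# Toy usage with a random unit-norm dictionary (hypothetical sizes).
rng = np.random.default_rng(0)
D = rng.standard_normal((64, 256))
D /= np.linalg.norm(D, axis=0)
x = D[:, :5] @ rng.standard_normal(5)          # signal synthesized from 5 atoms
z = ista_sparse_code(x, D, lam=0.05)
print("non-zero coefficients:", np.count_nonzero(np.abs(z) > 1e-6))
```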
style="position: relative; top: -2px;" href="/paper/lacunarity-analysis-on-image-patterns-for#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/lacunarity-analysis-on-image-patterns-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/lacunarity-analysis-on-image-patterns-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> </div> <div class="loading" style="display: none;"> <div class="loader-ellips infinite-scroll-request"> <span class="loader-ellips__dot"></span> <span class="loader-ellips__dot"></span> <span class="loader-ellips__dot"></span> <span class="loader-ellips__dot"></span> </div> </div> <div class="search-submit-paper text-center" style="font-size:16px;padding-bottom:30px;"> Cannot find the paper you are looking for? You can <a href="/submit-paper">Submit</a> a new open access paper. </div> </div> </div> <div class="footer"> <div class="footer-contact"> <span class="footer-contact-item">Contact us on:</span> <a class="footer-contact-item" href="mailto:hello@paperswithcode.com"> <span class=" icon-wrapper icon-ion" data-name="mail"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M424 80H88a56.06 56.06 0 0 0-56 56v240a56.06 56.06 0 0 0 56 56h336a56.06 56.06 0 0 0 56-56V136a56.06 56.06 0 0 0-56-56zm-14.18 92.63l-144 112a16 16 0 0 1-19.64 0l-144-112a16 16 0 1 1 19.64-25.26L256 251.73l134.18-104.36a16 16 0 0 1 19.64 25.26z"/></svg></span> hello@paperswithcode.com </a>. <span class="footer-contact-item"> Papers With Code is a free resource with all data licensed under <a rel="noreferrer" href="https://creativecommons.org/licenses/by-sa/4.0/">CC-BY-SA</a>. 