Lei Yu | Papers With Code

Search results for author: Lei Yu
Found 132 papers, 53 papers with code

The DeepMind Chinese–English Document Translation System at WMT2020
no code implementations • WMT (EMNLP) 2020 • Lei Yu, Laurent Sartran, Po-Sen Huang, Wojciech Stokowiec, Domenic Donato, Srivatsan Srinivasan, Alek Andreev, Wang Ling, Sona Mokra, Agustin Dal Lago, Yotam Doron, Susannah Young, Phil Blunsom, Chris Dyer

This paper describes the DeepMind submission to the Chinese→English constrained data track of the WMT2020 Shared Task on News Translation.

Tasks: Document Translation, Sentence, +2
href="/paper/homomatcher-dense-feature-matching-results"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFl
btf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/homomatcher-dense-feature-matching-results">HomoMatcher: Dense Feature Matching Results with Semi-Dense Efficiency by Homography Estimation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/homomatcher-dense-feature-matching-results#code">no code implementations</a> • <span class="author-name-text item-date-pub">11 Nov 2024</span> • <span class="author-span "> <a href="/author/xiaolong-wang">Xiaolong Wang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/yingying-zhang">Yingying Zhang</a></span>, <span class="author-span "> <a href="/author/jiangwei-lao">Jiangwei Lao</a></span>, <span class="author-span "> <a href="/author/lixiang-ru">Lixiang Ru</a></span>, <span class="author-span "> <a href="/author/liheng-zhong">Liheng Zhong</a></span>, <span class="author-span "> <a href="/author/jingdong-chen">Jingdong Chen</a></span>, <span class="author-span "> <a href="/author/yu-zhang">Yu Zhang</a></span>, <span class="author-span "> <a href="/author/ming-yang">Ming Yang</a></span> </p> <p class="item-strip-abstract">To address this limitation, this paper concentrates on enhancing the fine-matching module in the semi-dense matching framework.</p> <div class="sota"> </div> <p> <a 
href="/task/homography-estimation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Homography Estimation</span> </span> </a> <a href="/task/patch-matching"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Patch Matching</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/homomatcher-dense-feature-matching-results" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/homomatcher-dense-feature-matching-results#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/smart-llama-two-stage-post-training-of-large"> <div class="item-image" style="background-image: 

Smart-LLaMA: Two-Stage Post-Training of Large Language Models for Smart Contract Vulnerability Detection and Explanation
no code implementations • 9 Nov 2024 • Lei Yu, Shiqi Chen, Hang Yuan, Peng Wang, Zhirong Huang, Jingyuan Zhang, Chenjie Shen, Fengjun Zhang, Li Yang, Jiajia Ma

Existing smart contract vulnerability detection methods face three main issues: (1) Insufficient quality of datasets, lacking detailed explanations and precise vulnerability locations.

Tasks: Vulnerability Detection

Boosting Imperceptibility of Stable Diffusion-based Adversarial Examples Generation with Momentum
1 code implementation • 17 Oct 2024 • Nashrah Haque, Xiang Li, Zhehui Chen, Yanzhao Wu, Lei Yu, Arun Iyengar, Wenqi Wei

We propose a novel framework, Stable Diffusion-based Momentum Integrated Adversarial Examples (SD-MIAE), for generating adversarial examples that can effectively mislead neural network classifiers while maintaining visual imperceptibility and preserving the semantic similarity to the original class label.

Tasks: Semantic Similarity, Semantic Textual Similarity, +1
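The "momentum" in this title refers to momentum-iterative attacks (in the style of MI-FGSM), which stabilize the perturbation direction by accumulating normalized gradients across iterations. A minimal sketch under that assumption, as a generic classifier attack; it is not the SD-MIAE pipeline, which additionally starts from Stable Diffusion-generated images:

```python
# Generic momentum-iterative adversarial-example sketch (MI-FGSM style);
# NOT the paper's SD-MIAE method. Assumes inputs are in [0, 1].
import torch
import torch.nn.functional as F

def momentum_attack(model, x, y, eps=8 / 255, steps=10, mu=1.0):
    """Perturb x within an L-inf ball of radius eps to mislead model."""
    alpha = eps / steps            # per-step size
    g = torch.zeros_like(x)        # accumulated momentum term
    x_adv = x.clone().detach()
    for _ in range(steps):
        x_adv.requires_grad_(True)
        loss = F.cross_entropy(model(x_adv), y)
        grad, = torch.autograd.grad(loss, x_adv)
        # Momentum update: accumulate normalized gradients so the
        # perturbation direction is stabilized across iterations.
        g = mu * g + grad / grad.abs().mean().clamp_min(1e-12)
        x_adv = (x_adv + alpha * g.sign()).detach()
        # Project back into the eps-ball and the valid pixel range.
        x_adv = torch.clamp(torch.min(torch.max(x_adv, x - eps), x + eps), 0, 1)
    return x_adv
```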

Geometric Signatures of Compositionality Across a Language Model's Lifetime
no code implementations • 2 Oct 2024 • Jin Hwa Lee, Thomas Jiralerspong, Lei Yu, Yoshua Bengio, Emily Cheng

Compositionality, the notion that the meaning of an expression is constructed from the meaning of its parts and syntactic rules, permits the infinite productivity of human language.
Robust LLM safeguarding via refusal feature adversarial training
no code implementations • 30 Sep 2024 • Lei Yu, Virginie Do, Karen Hambardzumyan, Nicola Cancedda
Large language models (LLMs) are vulnerable to adversarial attacks that can elicit harmful responses.

Effective Diffusion Transformer Architecture for Image Super-Resolution
1 code implementation • 29 Sep 2024 • Kun Cheng, Lei Yu, Zhijun Tu, Xiao He, Liyu Chen, Yong Guo, Mingrui Zhu, Nannan Wang, Xinbo Gao, Jie Hu
In this work, we design an effective diffusion transformer for image super-resolution (DiT-SR) that achieves the visual quality of prior-based methods, but through a training-from-scratch manner.
Tasks: Image Generation, Image Super-Resolution • Stars: 6

class="col-lg-3 item-image-col"> <a href="/paper/learning-quantized-adaptive-conditions-for"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPp
Learning Quantized Adaptive Conditions for Diffusion Models
no code implementations • 26 Sep 2024 • Yuchen Liang, Yuchuan Tian, Lei Yu, Huao Tang, Jie Hu, Xiangzhong Fang, Hanting Chen
The curvature of ODE trajectories in diffusion models hinders their ability to generate high-quality images in a small number of function evaluations (NFE).

style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-quantized-adaptive-conditions-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-quantized-adaptive-conditions-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2419273 --> <div class="col-lg-3 item-image-col"> <a href="/paper/mhad-multimodal-home-activity-dataset-with"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/6a5ddf9f-d2bb-41f3-9697-1d4e2d9cd242.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mhad-multimodal-home-activity-dataset-with">MHAD: Multimodal Home Activity Dataset with Multi-Angle Videos and Synchronized Physiological Signals</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mhad-multimodal-home-activity-dataset-with#code">1 code implementation</a> • <span class="author-name-text item-date-pub">14 Sep 2024</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/jintao-fei">Jintao Fei</a></span>, <span class="author-span "> <a href="/author/xinyi-liu">Xinyi Liu</a></span>, <span class="author-span "> <a href="/author/yang-yao">Yang Yao</a></span>, <span class="author-span "> <a href="/author/jun-zhao">Jun Zhao</a></span>, <span class="author-span "> <a href="/author/guoxin-wang">Guoxin Wang</a></span>, <span class="author-span "> <a href="/author/xin-li">Xin Li</a></span> </p> <p class="item-strip-abstract">This non-contact, real-time monitoring method holds great potential for home settings.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 8</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mhad-multimodal-home-activity-dataset-with" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 
4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mhad-multimodal-home-activity-dataset-with#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/gaussian-rate-distortion-perception-coding"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq3
2jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div 
class="col-lg-9 item-content"> <h1><a href="/paper/gaussian-rate-distortion-perception-coding">Gaussian Rate-Distortion-Perception Coding and Entropy-Constrained Scalar Quantization</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/gaussian-rate-distortion-perception-coding#code">no code implementations</a> • <span class="author-name-text item-date-pub">4 Sep 2024</span> • <span class="author-span "> <a href="/author/li-xie">Li Xie</a></span>, <span class="author-span "> <a href="/author/liangyan-li">Liangyan Li</a></span>, <span class="author-span "> <a href="/author/jun-chen">Jun Chen</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/zhongshan-zhang">Zhongshan Zhang</a></span> </p> <p class="item-strip-abstract">This paper investigates the best known bounds on the quadratic Gaussian distortion-rate-perception function with limited common randomness for the Kullback-Leibler divergence-based perception measure, as well as their counterparts for the squared Wasserstein-2 distance-based perception measure, recently established by Xie et al.</p> <div class="sota"> </div> <p> <a href="/task/quantization"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Quantization</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/gaussian-rate-distortion-perception-coding" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/gaussian-rate-distortion-perception-coding#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2411731 --> <div class="col-lg-3 item-image-col"> <a href="/paper/swe-bench-java-a-github-issue-resolving"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/3e6eddb6-5b8e-4319-8b49-963d5957bf36.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/swe-bench-java-a-github-issue-resolving">SWE-bench-java: A GitHub Issue Resolving Benchmark for Java</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/swe-bench-java-a-github-issue-resolving#code">2 code implementations</a> • <span class="author-name-text item-date-pub">26 Aug 2024</span> • <span class="author-span "> <a href="/author/daoguang-zan">Daoguang Zan</a></span>, <span class="author-span "> <a href="/author/zhirong-huang">Zhirong Huang</a></span>, <span class="author-span 
"> <a href="/author/ailun-yu">Ailun Yu</a></span>, <span class="author-span "> <a href="/author/shaoxin-lin">Shaoxin Lin</a></span>, <span class="author-span "> <a href="/author/yifan-shi">Yifan Shi</a></span>, <span class="author-span "> <a href="/author/wei-liu">Wei Liu</a></span>, <span class="author-span "> <a href="/author/dong-chen">Dong Chen</a></span>, <span class="author-span "> <a href="/author/zongshuai-qi">Zongshuai Qi</a></span>, <span class="author-span "> <a href="/author/hao-yu">Hao Yu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/dezhi-ran">Dezhi Ran</a></span>, <span class="author-span "> <a href="/author/muhan-zeng">Muhan Zeng</a></span>, <span class="author-span "> <a href="/author/bo-shen">Bo Shen</a></span>, <span class="author-span "> <a href="/author/pan-bian">Pan Bian</a></span>, <span class="author-span "> <a href="/author/guangtai-liang">Guangtai Liang</a></span>, <span class="author-span "> <a href="/author/bei-guan">Bei guan</a></span>, <span class="author-span "> <a href="/author/pengjie-huang">Pengjie Huang</a></span>, <span class="author-span "> <a href="/author/tao-xie">Tao Xie</a></span>, <span class="author-span "> <a href="/author/yongji-wang">Yongji Wang</a></span>, <span class="author-span "> <a href="/author/qianxiang-wang">Qianxiang Wang</a></span> </p> <p class="item-strip-abstract">GitHub issue resolving is a critical task in software engineering, recently gaining significant attention in both industry and academia.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 153</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/swe-bench-java-a-github-issue-resolving" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/swe-bench-java-a-github-issue-resolving#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 
Exploring Scene Affinity for Semi-Supervised LiDAR Semantic Segmentation
no code implementations • 21 Aug 2024 • Chuandong Liu, Xingxing Weng, Shuguo Jiang, Pengcheng Li, Lei Yu, Gui-Song Xia
Unlike most methods that include all points in pseudo-labeled scenes for forward propagation but only pseudo-labeled points for backpropagation, AIScene removes points without pseudo-labels, ensuring consistency in both forward and backward propagation within the scene.
Tasks: 3D Semantic Segmentation, Data Augmentation, +3 more

First Activations Matter: Training-Free Methods for Dynamic Activation in Large Language Models (/paper/first-activations-matter-training-free)
no code implementations • 21 Aug 2024 • Chi Ma, Mincong Huang, Ying Zhang, Chao Wang, Yujie Wang, Lei Yu, Chuan Liu, Wei Lin
Dynamic activation (DA) techniques, such as DejaVu and MoEfication, have demonstrated their potential to significantly enhance the inference efficiency of large language models (LLMs).
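The listing gives only the abstract's opening line, so as orientation: dynamic-activation schemes of the DejaVu/MoEfication sort skip computation for neurons whose activations are expected to be negligible. Below is a minimal threshold-based sketch of that idea; all names, shapes, and the cutoff `tau` are illustrative assumptions, not the paper's method.

```python
import torch
import torch.nn.functional as F

def gated_mlp_dynamic(x, w_gate, w_up, w_down, tau=0.05):
    """Threshold-style dynamic activation for a single token (illustrative).

    x: (d_model,); w_gate, w_up: (d_ff, d_model); w_down: (d_model, d_ff).
    Neurons whose gate activation is below tau are skipped, so the up- and
    down-projections only touch the active rows/columns.
    """
    gate = F.silu(w_gate @ x)                 # (d_ff,) gating activations
    active = gate.abs() > tau                 # boolean mask of "hot" neurons
    h = gate[active] * (w_up[active] @ x)     # compute only the active rows
    return w_down[:, active] @ h              # matching columns of the down-projection

d_model, d_ff = 16, 64
x = torch.randn(d_model)
out = gated_mlp_dynamic(x, torch.randn(d_ff, d_model),
                        torch.randn(d_ff, d_model),
                        torch.randn(d_model, d_ff))
```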
</div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/first-activations-matter-training-free" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/first-activations-matter-training-free#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2397153 --> <div class="col-lg-3 item-image-col"> <a href="/paper/2408-01031"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwm
u916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/2408-01031">POA: Pre-training Once for Models of All Sizes</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/2408-01031#code">1 code implementation</a> • <span class="author-name-text item-date-pub">2 Aug 
2024</span> • <span class="author-span "> <a href="/author/yingying-zhang">Yingying Zhang</a></span>, <span class="author-span "> <a href="/author/xin-guo">Xin Guo</a></span>, <span class="author-span "> <a href="/author/jiangwei-lao">Jiangwei Lao</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/lixiang-ru">Lixiang Ru</a></span>, <span class="author-span "> <a href="/author/jian-wang">Jian Wang</a></span>, <span class="author-span "> <a href="/author/guo-ye">Guo Ye</a></span>, <span class="author-span "> <a href="/author/huimei-he">Huimei He</a></span>, <span class="author-span "> <a href="/author/jingdong-chen">Jingdong Chen</a></span>, <span class="author-span "> <a href="/author/ming-yang">Ming Yang</a></span> </p> <p class="item-strip-abstract">Once pre-trained, POA allows the extraction of pre-trained models of diverse sizes for downstream tasks.</p> <div class="sota"> </div> <p> <a href="/task/representation-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000228-3131cfbf_nx72Tly.jpg"> <span>Representation Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 24</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/2408-01031" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/2408-01031#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/functional-faithfulness-in-the-wild-circuit"> <div 
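Purely as intuition for what "extracting a model of a different size" from one trained network can mean, here is a generic width-slicing sketch. The function `slice_linear` and its keep-the-first-units rule are assumptions for illustration; POA's elastic-branch procedure is more sophisticated than this.

```python
import torch
import torch.nn as nn

def slice_linear(layer: nn.Linear, in_keep: int, out_keep: int) -> nn.Linear:
    """Build a narrower Linear layer from the first in_keep/out_keep units of a
    trained one; a crude stand-in for extracting a smaller sub-model."""
    sub = nn.Linear(in_keep, out_keep, bias=layer.bias is not None)
    with torch.no_grad():
        sub.weight.copy_(layer.weight[:out_keep, :in_keep])
        if layer.bias is not None:
            sub.bias.copy_(layer.bias[:out_keep])
    return sub

big = nn.Linear(512, 512)
small = slice_linear(big, 256, 256)   # width-halved layer sharing the big one's weights
```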
class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkVZb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5fl
Functional Faithfulness in the Wild: Circuit Discovery with Differentiable Computation Graph Pruning (/paper/functional-faithfulness-in-the-wild-circuit)
no code implementations • 4 Jul 2024 • Lei Yu, Jingcheng Niu, Zining Zhu, Gerald Penn
In this paper, we introduce a comprehensive reformulation of the task known as Circuit Discovery, along with DiscoGP, a novel and effective algorithm based on differentiable masking for discovering circuits.
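"Differentiable masking" here refers to learning continuous gates over model components and binarizing them afterwards. The cartoon below shows the general pattern under a generic objective; it is not DiscoGP's actual parameterization or faithfulness metric.

```python
import torch

w = torch.randn(8, 8)                               # frozen weights of some component
logits = torch.zeros(8, 8, requires_grad=True)      # one learnable gate per weight
opt = torch.optim.Adam([logits], lr=0.1)

x = torch.randn(32, 8)
target = x @ w.T                                    # behaviour the circuit must preserve

for _ in range(200):
    mask = torch.sigmoid(logits)                    # soft gates in (0, 1)
    faithfulness = ((x @ (w * mask).T - target) ** 2).mean()
    sparsity = mask.mean()                          # push most gates toward zero
    loss = faithfulness + 0.1 * sparsity
    opt.zero_grad()
    loss.backward()
    opt.step()

circuit = torch.sigmoid(logits) > 0.5               # hard binary circuit after training
```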
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/functional-faithfulness-in-the-wild-circuit#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/on-the-robustness-of-graph-reduction-against"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2407.02431.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/on-the-robustness-of-graph-reduction-against">On the Robustness of Graph Reduction Against GNN Backdoor</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/on-the-robustness-of-graph-reduction-against#code">no code implementations</a> • <span class="author-name-text item-date-pub">2 Jul 2024</span> • <span class="author-span "> <a href="/author/yuxuan-zhu">Yuxuan Zhu</a></span>, <span class="author-span "> <a href="/author/michael-mandulak">Michael Mandulak</a></span>, <span class="author-span "> <a href="/author/kerui-wu">Kerui Wu</a></span>, <span class="author-span "> <a href="/author/george-slota">George Slota</a></span>, <span class="author-span "> <a href="/author/yuseok-jeon">Yuseok Jeon</a></span>, <span class="author-span "> <a href="/author/ka-ho-chow">Ka-Ho Chow</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Meanwhile, graph reduction techniques, including coarsening and sparsification, which have long been employed to improve the scalability of large graph computational tasks, have recently emerged as effective methods for accelerating GNN training on large-scale graphs.</p> <div class="sota"> </div> <p> <a href="/task/computational-efficiency"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Computational Efficiency</span> </span> </a> <a href="/task/data-poisoning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Data Poisoning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/on-the-robustness-of-graph-reduction-against" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 
MOYU: A Theoretical Study on Massive Over-activation Yielded Uplifts in LLMs (/paper/moyu-a-theoretical-study-on-massive-over)
no code implementations • 18 Jun 2024 • Chi Ma, Mincong Huang, Chao Wang, Yujie Wang, Lei Yu
Massive Over-activation Yielded Uplifts (MOYU) is an inherent property of large language models, and dynamic activation (DA) based on the MOYU property is a clever yet under-explored strategy designed to accelerate inference in these models.
<div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/intrinsic-evaluation-of-unlearning-using">Intrinsic Evaluation of Unlearning Using Parametric Knowledge Traces</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/intrinsic-evaluation-of-unlearning-using#code">1 code implementation</a> • <span class="author-name-text item-date-pub">17 Jun 2024</span> • <span class="author-span "> <a href="/author/yihuai-hong">Yihuai Hong</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/haiqin-yang">Haiqin Yang</a></span>, <span class="author-span "> <a href="/author/shauli-ravfogel">Shauli Ravfogel</a></span>, <span class="author-span "> <a href="/author/mor-geva">Mor Geva</a></span> </p> <p class="item-strip-abstract">We use this approach to localize "concept vectors" - parameter vectors that encode concrete concepts - and construct ConceptVectors, a benchmark dataset containing hundreds of common concepts and their parametric knowledge traces within two open-source LLMs.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 31</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/intrinsic-evaluation-of-unlearning-using" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/intrinsic-evaluation-of-unlearning-using#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2360928 --> <div class="col-lg-3 item-image-col"> <a href="/paper/qqq-quality-quattuor-bit-quantization-for"> <div class="item-image" 
style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/606cc5e4-6498-4f3f-8c22-c833a1607c70.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/qqq-quality-quattuor-bit-quantization-for">QQQ: Quality Quattuor-Bit Quantization for Large Language Models</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/qqq-quality-quattuor-bit-quantization-for#code">1 code implementation</a> • <span class="author-name-text item-date-pub">14 Jun 2024</span> • <span class="author-span "> <a href="/author/ying-zhang">Ying Zhang</a></span>, <span class="author-span "> <a href="/author/peng-zhang">Peng Zhang</a></span>, <span class="author-span "> <a href="/author/mincong-huang">Mincong Huang</a></span>, <span class="author-span "> <a href="/author/jingyang-xiang">Jingyang Xiang</a></span>, <span class="author-span "> <a href="/author/yujie-wang">Yujie Wang</a></span>, <span class="author-span "> <a href="/author/chao-wang">Chao Wang</a></span>, <span class="author-span "> <a href="/author/yineng-zhang">Yineng Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/chuan-liu">Chuan Liu</a></span>, <span class="author-span "> <a href="/author/wei-lin-1">Wei Lin</a></span> </p> <p class="item-strip-abstract">Quantization is a proven effective method for compressing large language models.</p> <div class="sota"> </div> <p> <a href="/task/quantization"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Quantization</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 91</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/qqq-quality-quattuor-bit-quantization-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/qqq-quality-quattuor-bit-quantization-for#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 
MTS-Net: Dual-Enhanced Positional Multi-Head Self-Attention for 3D CT Diagnosis of May-Thurner Syndrome (/paper/mts-net-dual-enhanced-positional-multi-head)
1 code implementation • 7 Jun 2024 • Yixin Huang, Yiqi Jin, Ke Tao, Kaijian Xia, Jianfeng Gu, Lei Yu, Lan Du, Cunjian Chen
In this paper, we present a 3D-based deep learning approach called MTS-Net for diagnosing May-Thurner Syndrome using CT scans.
GitHub stars: 0

Dynamic Activation Pitfalls in LLaMA Models: An Empirical Study (/paper/dynamic-activation-pitfalls-in-llama-models)
no code implementations • 15 May 2024 • Chi Ma, Mincong Huang, Chao Wang, Yujie Wang, Lei Yu
In this work, we systematically investigate the efficacy of dynamic activation mechanisms within the LLaMA family of language models.
Tasks: Attribute
Super-Resolving Blurry Images with Events (/paper/super-resolving-blurry-images-with-events)
no code implementations • 11 May 2024 • Chi Zhang, Mingyuan Lin, Xiang Zhang, Chenxu Jiang, Lei Yu
Super-resolution from motion-blurred images poses a significant challenge due to the combined effects of motion blur and low spatial resolution.
Tasks: Super-Resolution
href="/paper/super-resolving-blurry-images-with-events" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/super-resolving-blurry-images-with-events#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-monocular-depth-from-focus-with"> <div class="item-image" style="background-image: url('data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoKCgoKCgsMDAsPEA4QDxYUExMUFiIYGhgaGCIzICUgICUgMy03LCksNy1RQDg4QFFeT0pPXnFlZXGPiI+7u/v/wgALCACcAPIBAREA/8QAGgABAAMBAQEAAAAAAAAAAAAAAAECAwQFB//aAAgBAQAAAAD7MzwvSN7iZFOeUa6BYIACQgASHP5ukY6RefVkMfLFs7q9vcGPlWvvtEW6JDHx4tEWW6duwMuCnVvdC0yMfMrMXrMOr0DPQK8/RYBh460VSjv9Ew3cfNE1tEdfYGHjwiQ7fTMdnJ5ts9cbW6/RDHx9J00hMdPSYbuHzwT1+kGHlVshC3X6Jlq4OBW0J6/TDHyEpRDt9A5ulw+fIpPd6IYeTNaXgju9I5+hzZkyrtuGHjXotEJ7vRMds+N0JsrG1hh5GsRInp9Apfl8nSkaRER6PeMPHibUmYd3ohycFF6TbLXt7Rl49kxFkdfoByeXE2gPQ7hj5kDTVHR1By+Ulbalo7esZePVJEz2eiHNwTRpES7eoYeNMm0zXr7QQASCAkAAAAAA/8QARBAAAQIEBAMDCQYCCAcAAAAAAQIRAAMSIQQiMUETUWEgMnEUQlJTgZGSodIQI2JyscEzggUVMEOywtHhJVBjg6Li8P/aAAgBAQABPwD7JsxEpNS3Z9hAxshW6vamFY6QBSCpmcmk3jy3Du1RdvRMeWyPxb7QifKmJfiAD8RAMcSX61HxCOJL9aj4hHEl+tR8QjiS/Wo+IQlSVXBB8OytaZaSsuw5XMDGyDur4THlmHBFSlN0EDG4Z0grVf8ACY8tkA+d7ujxLnypossJu2a36xxJfrUfEI4kv1qPiEcSX61HxCOJL9aj4hCVoVooFuRB7bQ0MIaGENDQ0N22hoaGhoaGhobt4pKjKSUpWpQWCySBcDrBkTuIU8HEFIQKSJkCStCFAyMSSTS3EHi4e4jgTV/3E8fdqDmYNnMS5U8S01YfEA0s3FG8GXNKlPhp7gG9Y3uYRJmAk+T4q3OcCCDAkrRUgycQb+s6C14lCdJWFeS4hwNFTX1iWoqQklNJa437WIJEhZqIYbf7EQVTEqvMWvnmJPzMCYqnvzXI0BVcDdnhJWy0ibNuARmLc2N4K10AJVMaom5O1ucKWpOi16g6n3C8GYqkATJhdIUTUf8AWAtZDmYsBRYgEi7WJvFazetYZ9CQ55i+ggLXQ9czVjc7XBGbpeMGqYVLClL0diS3uJPaxC1ysPMmpF0pJDgm/gI/rOeXHDRYA9xfytH9Y4kG0lB0d0riVjcTNWEhEpibuFJYjqee0FeNSWrwju2qof8ApIKZYw4TyBLiP+IpAfyZwDuqHx/o4d3NnVD/ANIecnDMlmuq/Ptzx9xMgIUSrJdSg3QcrogBbVUlh063bJApNR4YJoZ2azajJCwqsIEogPZmb/BAdRYS7MNRy3aiAkpJFD1ADTYl7mmEpdYGjHMKf/SFoqUAlDOCdAO9sTSbQcJNopCUhyTqDrt3YwsmZJWpUwAkjof0A7U8VSljZoTIFKqjt01282BJQGdStDsD/lhpRDDCyc7+b/tAQ1+BLChcEc/GAuaRdCQwtfeHnAAGUBfZUBU0A5Eu/PURVOBfhoZvSiqfSAZaQOioS7XDHs4h+BMpd+j/ALQFYipSq5j1EtmaxgzJ6iDXNA/mEVYgZeLMcHV1N1hUydmzzRcDzm5XhM2ezErIALd57wlWJ0rVozur3nqI404pAK5hsdXcPAXPA/iTDcEOVN1fpzjiT1IU0+a4OgKmbk3XaMEpajMqUs6M7sx8ewpZC0JYZnv2lEJSSdADAxkj0lb+araEqC0gh/7HEgnDzbPaACDdDAj0X/ywAaVEAkOLFIZjcuKYTSSoLTqb5dhv3YZnZPeHogOeRywpx5jdKdnd3pgXD8N7uMov7aYZyAUX3yjU6EZYSbPQXceaNGZjkhgKshII9EC4Gtk7RgaUTJlIbKGdIHzAHYmBPFlO7uw+3GTZklMpSDS5IuAQfiIjyycxzp0GyPq32jyueQghY3csnQFnOaPLcQBaYnfZLBjzq3g4ue7CaguAQQE8nI70HGTtRMTSSbCm3JzVHlU/O01BbonX4oGMnN30gnQsn6ow09cxS0LLlIHIfIE9rFMJExzYC52+ZECkJJtrrlZjoO9vAoc0KRoXLp+qGsHYpb8IcbF6oKdVOl30NPi9lQwcl0aVG6d+ZqiuWQTUlrFnT4t3oK0VIUogP1T9UAJrOZJb8rtyOYQVpJLs2pNSfec0YFwuYOmzbnoT2FpPElnl1+3GAkSmPnOYZRSbqYEWYwmu916cjpAE0AKClM+jK8XhKZgUkV
Zb830eEhdR7zvfKq29oNYSKTMuepNrgRnIIzBm5xgCqpddVwOfuv2p5AkTCz87tBnKyjhKuLffubmEzJJCCAus1Onim3K7bwFyyShpgDH+931iVikSyQlBU4vVMew8YONQkkcNOo86DjkDWSjR+8x0sIOPSADwUFiXFUeXJJJEpDc6oTjk+pR8Vud4w+JE5JAQlJ5gu47E5IM2UerdT9uOAaUpQBYksQP3BikLBLAgBzYaD+WAUJqJSl0EAaAC9/NgSwQ6ikWdIZJ6N3YCEM+VmLlh4ADLBSjLQ1RA2S9v5YNBALJABdwA7kad2DQVnKDUXGn0xgQApdLMwGwvzLAX7WI/gTS7MIrCSkVMFdSzfFCiR3Cq4uXP1wlVNSnLgMG32Nq4JCqVIW6SATc2ItfNFVyxNLDe/Mh64Qq9lE2F3JAHUFcBgSKncWAVt1zwTQs0qcbXP1xUXJQTpZy4Jf8ANGBupTLfLe5+pXYXTUhwdbfbjikCU5bMWNtvEiEmSUqU6fykpfqRmMApWoqWpABFjlNvi2gKTW6KEtoTT7hmgkOFClgAxBToLP3rw8oVBJSU+KSTyHeitIAKVIcuGdL2/mjKCFGit+afq2jAlNS6WYgOA37E9rEXkTNTbaGXSbKPiFQEqEtIZT39ODWbivXep290KQqsd9ir8UfeubHR/OZjztAExKu6dCfOOsULJCVAsx9IfNoomFQSkHT8XtctBrKRlUGJvnFjtaMI4mTHfu9dSeoHYmPxpXRzq32492lBKtyDc/sRCipqRU4Ja5frvAKl00k3e9SmbbeApVBcKIW7ZiSCC/OAVl1Mpw1gSA24Z4SCFJJfTYq92sJMzM9TXu6oeYALKuPSPsa/vjAllzE3YJDFyR89+1iQ+Hm+EBCQFOhLAg2A3tsmKQKXS4f0R+yYEpgykC4CgaRpo/djLQAEpITfuh3NzamGTnYOLWpAB/8AGGSAKqXYgApH0wJYXYAEhJLsNtfNgJS4pCdC2UajbuxQhLg0/CPpjAUoUqlrJ2ABHuA7E0gTpIa/24qRMmpllIFlbtf3vAwMtkitT1Pon/SPIZQdpqz1pTbwtBwEtOk1TX2T8rQMDKRUAslwdUps5flBwaCqqshmsEpa3sg4GVRQJqtX0SfZpHkMpg61Fgdkt+kSZCcO9BOjdrEgnDzOohwprjvDcOwDenBpzmtJYgBlA7vuoawKarFg97gsDv34eqkunW5qBDnfvxlD5gq17jc7OuAZa1K00bUe+yoKgzq97j64fapKrhr/ADGeHpW+uYt4gOWzbbxgjdTEHKD1f4ldiZSZso31YfZMXLQkKXMpFQ9p5QuZIU5GPWASSACGD7B9hClyWvj1jxIdoTicMkGqcDSA79dCTBxWGFuOh+Tx5ThnI4qXFm3eBisIUvx0k9DHlWG0M5IGpg4nDil5ycwcHZo8qwrPx0MG35wiZLWMiwbAn26dnEmnDzVUuw0qp35xxZYYmUpNrffJvFeHPDFKiqlVSeJoRdoE2Tmsq/8A1RYQlclxZTOX+9EBcmwCVEjfijeEqkrWkUqur1oLEwJkpZahVyA3FHhBXLZJSkn/ALoF4E2TsFNofvRvGBWhSpgTsH79XYXKUtcpaQLK15Dp9mMLSX0zD3+wiBMprZdyDurm+lWkJmyyUlSrgAakfuYTNSE1BZqL8/qgTE0jPZR5qsxv50GclVQqLFtzzf0oM1NKVPoOov8AFCZwpRUvUHcmzsbFUCZa6wlNYa523JqgTUJ881NuSR4gVRgS6ZmaoOObc9yeziLSJhJZh+/QiEzaTaYk1CxrNiDuCreOLRMAE3QjckN8UKmKa0zQ2zKcj4orBBHE5XdXzIVHGJDVl9NVfVCZpQjvVOB5x9zFUFaQtbLVswqJv8UJnJNIqOYWuoC5/NHECVBlr7ujk33PejCF5s3M7dSf1J7WMfg2d6hcP+wMCtySP8XuAaE1oqF9WqdRitQU4QXc+kRDTSQdAWsaoVxGDVAPap39whllKmqLE2ze0aQagpikgsQ92gVM33tifNUzn3wtJzXU9JbXV9hGDJKVkvc7uwO7A9md/Ama6bPz6RQqnKVi93CopUaznYFwCFOx5W1EC6Ug197XNoTvaAVBSXqZ9KVaO1zCEqBAIL30qIZnNzA4h0SagTso9eUFCwVJShbubmqGnJAFKyRoWURbq0JQthlWHd1Mva9i1jGCCwZhJmMQLF2J53btY5uAHSGKhqx/V4QwS4Sip2DAC50L0wlIpUEoTUw2Gu75YSxCaqT/ACi3hlhgKsqanLJ263pgukJypPsAZ7sGTCQkqJpFz3up1bLCSWvTY8hY/DCkprC2DML9fCmFBTOyWYt0cuwyxgmoWKnzPpz9g7M8pEiY6QrmGe0Fctv4EuncUX5WvFaEkEYeWwGyNzqNYUZaWPk0km+iLatzipAAeRKY7BHXxh0BlDDShfZG2+8JWJc0FMmSDoFBNx1d4GMnsFAJ5EU38dYVjZ4SCEpYbUm/zgYyaQkMH/L+t4w8+ZOKgsJAADMGL+89rHCiQKtah7PmIUoNUVgXA2DsPzRfYakdf0XAWSgFw1Vm2O7mqEswLPbRwLbHvQhKpgKpSSWIdm+qBJmgh5KwQD0v8UHD4hiOHM6KIt8lQJGINVcte75WLt0VaPJppPcWLlzZmO/etGFRMlhYUCASGGn7nsz0k4eYWgoTsDcFstgfhhQVSzZnG23wQgppFSdzsbvsRRtBQzdW832sMsFICj0u9L9LimGSbeaw835HLttFIYkCm92HsFqIUAUWAcFrp2ezZYIBWSALAKulnL/ljAUpK2QAGFv/AJKXHaxa+HKCn84Ws3zhWIWCKlpLEOcjGFYgs6VpsTsnePKFKy1jk2RyYOJLJNYFiPMhGJmFQZYFrjKSwudI8tm6ibQCXDlNgdhBxeILATyHc1Oi40eDjJ2qZoFNjdELxU4FuKxYAd2MJOM5MxSluxDANy5p7OJKfJprnboPmY+7LVKCRbdIc6iF2KGUgAh3NNhyEKUkE5k3A9GKkoPeSXAOqYSkzCEICSQNHTdi0HDT1XMq9ncJctaPJcSpLcKk7d0uXYuIGGxA1kh7MclgIGDxANpJKf5RbxjCypktRrQ2UDbbw/s97AdkBu0w+1hyjcMN7/8AKP/Z');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-monocular-depth-from-focus-with">Learning Monocular Depth from Focus with Event Focal Stack</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/learning-monocular-depth-from-focus-with#code">no code implementations</a> • <span 
class="author-name-text item-date-pub">11 May 2024</span> • <span class="author-span "> <a href="/author/chenxu-jiang">Chenxu Jiang</a></span>, <span class="author-span "> <a href="/author/mingyuan-lin">Mingyuan Lin</a></span>, <span class="author-span "> <a href="/author/chi-zhang">Chi Zhang</a></span>, <span class="author-span "> <a href="/author/zhenghai-wang">Zhenghai Wang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Depth from Focus estimates depth by determining the moment of maximum focus from multiple shots at different focal distances, i. e. the Focal Stack.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-monocular-depth-from-focus-with" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-monocular-depth-from-focus-with#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/non-uniform-exposure-imaging-via-neuromorphic"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2404.13972.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/non-uniform-exposure-imaging-via-neuromorphic">Non-Uniform Exposure Imaging via Neuromorphic Shutter Control</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/non-uniform-exposure-imaging-via-neuromorphic#code">no code implementations</a> • <span class="author-name-text item-date-pub">22 Apr 2024</span> • <span class="author-span "> <a href="/author/mingyuan-lin">Mingyuan Lin</a></span>, <span class="author-span "> <a href="/author/jian-liu">Jian Liu</a></span>, <span class="author-span "> <a href="/author/chi-zhang">Chi Zhang</a></span>, <span class="author-span "> <a href="/author/zibo-zhao">Zibo Zhao</a></span>, <span class="author-span "> <a href="/author/chu-he">Chu He</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">To address this challenge, we propose a novel Neuromorphic Shutter Control (NSC) system to avoid motion blurs and alleviate instant noises, where the extremely low latency of events is leveraged to monitor the real-time motion and facilitate the scene-adaptive exposure.</p> <div class="sota"> </div> <p> <a href="/task/image-denoising"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000714-068a8901_2PQwzdm.jpg"> <span>Image Denoising</span> </span> </a> <a href="/task/self-supervised-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001882-b4b42454.jpg"> <span>Self-Supervised Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/non-uniform-exposure-imaging-via-neuromorphic" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/non-uniform-exposure-imaging-via-neuromorphic#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/mtga-multi-view-temporal-granularity-aligned"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2404.11979.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mtga-multi-view-temporal-granularity-aligned">MTGA: Multi-view Temporal Granularity aligned Aggregation for Event-based Lip-reading</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mtga-multi-view-temporal-granularity-aligned#code">no code implementations</a> • <span class="author-name-text item-date-pub">18 Apr 2024</span> • <span class="author-span "> <a href="/author/wenhao-zhang">WenHao Zhang</a></span>, <span class="author-span "> <a href="/author/jun-wang">Jun Wang</a></span>, <span class="author-span "> <a href="/author/yong-luo">Yong Luo</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wei-yu">Wei Yu</a></span>, <span class="author-span "> <a href="/author/zheng-he">Zheng He</a></span> </p> <p class="item-strip-abstract">Then we design a spatio-temporal fusion module based on temporal granularity alignment, where the global spatial features extracted from event frames, together with the local relative spatial and temporal features contained in voxel graph list are effectively aligned and integrated.</p> <div class="sota"> </div> <p> <a href="/task/lip-reading"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Lip Reading</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div 
class="entity" style="margin-bottom: 20px;"> <a href="/paper/mtga-multi-view-temporal-granularity-aligned" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mtga-multi-view-temporal-granularity-aligned#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2316281 --> <div class="col-lg-3 item-image-col"> <a href="/paper/detecting-every-object-from-events"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2404.05285.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/detecting-every-object-from-events">Detecting Every Object from Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/detecting-every-object-from-events#code">1 code implementation</a> • <span class="author-name-text item-date-pub">8 Apr 2024</span> • <span class="author-span "> <a href="/author/haitian-zhang">Haitian Zhang</a></span>, <span class="author-span "> <a href="/author/chang-xu">Chang Xu</a></span>, <span class="author-span "> <a href="/author/xinya-wang">Xinya Wang</a></span>, <span class="author-span "> <a href="/author/bingde-liu">Bingde Liu</a></span>, <span class="author-span "> <a href="/author/guang-hua">Guang Hua</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span> </p> <p class="item-strip-abstract">Object detection is critical in autonomous driving, and it is more practical yet challenging to localize objects of unknown categories: an endeavour known as Class-Agnostic Object Detection (CAOD).</p> <div class="sota"> </div> <p> <a href="/task/autonomous-driving"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000363-06d10c79.jpg"> <span>Autonomous Driving</span> </span> </a> <a href="/task/class-agnostic-object-detection"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/1adebeb2-b3fb-4a04-9404-353e94474a5b.jpg"> <span>Class-agnostic Object Detection</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/detecting-every-object-from-events#tasks"> <span class="badge badge-primary"> <b>+5</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 
9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 14</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/detecting-every-object-from-events" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/detecting-every-object-from-events#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/inclusive-design-insights-from-a-preliminary"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2403.19899.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/inclusive-design-insights-from-a-preliminary">Inclusive Design Insights from a Preliminary Image-Based Conversational Search Systems Evaluation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/inclusive-design-insights-from-a-preliminary#code">no code implementations</a> • <span class="author-name-text item-date-pub">29 Mar 2024</span> • <span class="author-span "> <a href="/author/yue-zheng">Yue Zheng</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/junmian-chen">Junmian Chen</a></span>, <span class="author-span "> <a href="/author/tianyu-xia">Tianyu Xia</a></span>, <span class="author-span "> <a href="/author/yuanyuan-yin">Yuanyuan Yin</a></span>, <span class="author-span "> <a href="/author/shan-wang">Shan Wang</a></span>, <span class="author-span "> <a href="/author/haiming-liu">Haiming Liu</a></span> </p> <p class="item-strip-abstract">The digital realm has witnessed the rise of various search modalities, among which the Image-Based Conversational Search System stands out.</p> <div class="sota"> </div> <p> <a href="/task/conversational-search"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> 
<span>Conversational Search</span> </span> </a> <a href="/task/emotion-recognition"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Emotion Recognition</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/inclusive-design-insights-from-a-preliminary" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/inclusive-design-insights-from-a-preliminary#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2309403 --> <div class="col-lg-3 item-image-col"> <a href="/paper/mechanisms-of-non-factual-hallucinations-in"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2403.18167.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mechanisms-of-non-factual-hallucinations-in">Mechanistic Understanding and Mitigation of Language Model Non-Factual Hallucinations</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mechanisms-of-non-factual-hallucinations-in#code">1 code implementation</a> • <span class="author-name-text item-date-pub">27 Mar 2024</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/meng-cao">Meng Cao</a></span>, <span class="author-span "> <a href="/author/jackie-chi-kit-cheung">Jackie Chi Kit Cheung</a></span>, <span class="author-span "> <a href="/author/yue-dong">Yue Dong</a></span> </p> <p class="item-strip-abstract">State-of-the-art language models (LMs) sometimes generate non-factual hallucinations that misalign with world knowledge.</p> <div class="sota"> </div> <p> <a href="/task/attribute"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Attribute</span> </span> </a> <a href="/task/hallucination"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Hallucination</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/mechanisms-of-non-factual-hallucinations-in#tasks"> <span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 
0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 2</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mechanisms-of-non-factual-hallucinations-in" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mechanisms-of-non-factual-hallucinations-in#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/big-data-analytics-to-classify-earthwork"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2402.14698.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/big-data-analytics-to-classify-earthwork">Using construction waste hauling trucks' GPS data to classify earthwork-related locations: A Chengdu case study</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/big-data-analytics-to-classify-earthwork#code">no code implementations</a> • <span class="author-name-text item-date-pub">22 Feb 2024</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/ke-han">Ke Han</a></span> </p> <p class="item-strip-abstract">Earthwork-related locations (ERLs), such as construction sites, earth dumping ground, and concrete mixing stations, are major sources of urban dust pollution (particulate matters).</p> <div class="sota"> </div> <p> <a href="/task/management"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Management</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a 
href="/paper/big-data-analytics-to-classify-earthwork" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/big-data-analytics-to-classify-earthwork#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2286125 --> <div class="col-lg-3 item-image-col"> <a href="/paper/neuromorphic-synergy-for-video-binarization"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2402.12644.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/neuromorphic-synergy-for-video-binarization">Neuromorphic Synergy for Video Binarization</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/neuromorphic-synergy-for-video-binarization#code">1 code implementation</a> • <span class="author-name-text item-date-pub">20 Feb 2024</span> • <span class="author-span "> <a href="/author/shijie-lin">ShiJie Lin</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span "> <a href="/author/lei-yang">Lei Yang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/bin-zhou">Bin Zhou</a></span>, <span class="author-span "> <a href="/author/xiaowei-luo">Xiaowei Luo</a></span>, <span class="author-span "> <a href="/author/wenping-wang">Wenping Wang</a></span>, <span class="author-span "> <a href="/author/jia-pan">Jia Pan</a></span> </p> <p class="item-strip-abstract">We also develop an efficient integration method to propagate this binary image to high frame rate binary video.</p> <div class="sota"> </div> <p> <a href="/task/binarization"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Binarization</span> </span> </a> <a href="/task/camera-calibration"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Camera Calibration</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/neuromorphic-synergy-for-video-binarization#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 
480z"/></svg></span> 31</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/neuromorphic-synergy-for-video-binarization" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/neuromorphic-synergy-for-video-binarization#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/an-llm-maturity-model-for-reliable-and"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2402.14855.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/an-llm-maturity-model-for-reliable-and">An LLM Maturity Model for Reliable and Transparent Text-to-Query</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/an-llm-maturity-model-for-reliable-and#code">no code implementations</a> • <span class="author-name-text item-date-pub">20 Feb 2024</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/abir-ray">Abir Ray</a></span> </p> <p class="item-strip-abstract">Recognizing the imperative to address the reliability and transparency issues of Large Language Models (LLM), this work proposes an LLM maturity model tailored for text-to-query applications.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/an-llm-maturity-model-for-reliable-and" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 
16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/an-llm-maturity-model-for-reliable-and#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-survey-of-privacy-threats-and-defense-in"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2402.03688.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-survey-of-privacy-threats-and-defense-in">A Survey of Privacy Threats and Defense in Vertical Federated Learning: From Model Life Cycle Perspective</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-survey-of-privacy-threats-and-defense-in#code">no code implementations</a> • <span class="author-name-text item-date-pub">6 Feb 2024</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/meng-han">Meng Han</a></span>, <span class="author-span "> <a href="/author/yiming-li">Yiming Li</a></span>, <span class="author-span "> <a href="/author/changting-lin">Changting Lin</a></span>, <span class="author-span "> <a href="/author/yao-zhang">Yao Zhang</a></span>, <span class="author-span "> <a href="/author/mingyang-zhang">Mingyang Zhang</a></span>, <span class="author-span "> <a href="/author/yan-liu">Yan Liu</a></span>, <span class="author-span "> <a href="/author/haiqin-weng">Haiqin Weng</a></span>, <span class="author-span "> <a href="/author/yuseok-jeon">Yuseok Jeon</a></span>, <span class="author-span "> <a href="/author/ka-ho-chow">Ka-Ho Chow</a></span>, <span class="author-span "> <a href="/author/stacy-patterson">Stacy Patterson</a></span> </p> <p class="item-strip-abstract">Vertical Federated Learning (VFL) is a federated learning paradigm where multiple participants, who share the same set of samples but hold different features, jointly train machine learning models.</p> <div class="sota"> </div> <p> <a href="/task/survey"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Survey</span> </span> </a> <a href="/task/vertical-federated-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Vertical Federated Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-survey-of-privacy-threats-and-defense-in" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 
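As the survey's definition states, each VFL participant owns a different feature slice of the same samples and trains a local encoder, while a coordinator fuses the partial embeddings to compute the loss. Below is a minimal split-learning-style sketch of that setup; the two-party split, layer sizes, and all names are illustrative assumptions, not a protocol from the survey, and it omits the privacy mechanisms the survey is actually about.

```python
import torch
import torch.nn as nn

# Two hypothetical parties hold different feature columns of the SAME samples.
x_a = torch.randn(128, 10)                  # party A: 10 features per sample
x_b = torch.randn(128, 6)                   # party B: 6 features per sample
y = torch.randint(0, 2, (128,)).float()     # labels held by the coordinator

enc_a = nn.Linear(10, 4)                    # party A's local encoder
enc_b = nn.Linear(6, 4)                     # party B's local encoder
head = nn.Linear(8, 1)                      # coordinator fuses both embeddings

opt = torch.optim.SGD(
    [*enc_a.parameters(), *enc_b.parameters(), *head.parameters()], lr=0.1
)
loss_fn = nn.BCEWithLogitsLoss()

for step in range(100):
    # Each party embeds only its own feature slice; raw features are never
    # exchanged, only the intermediate embeddings reach the coordinator.
    z = torch.cat([enc_a(x_a), enc_b(x_b)], dim=1)
    loss = loss_fn(head(z).squeeze(1), y)
    opt.zero_grad()
    loss.backward()   # gradients flow back to each party's local encoder
    opt.step()
```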
Beyond Sparse Rewards: Enhancing Reinforcement Learning with Language Model Critique in Text Generation
no code implementations • 14 Jan 2024 • Meng Cao, Lei Shu, Lei Yu, Yun Zhu, Nevan Wichers, Yinxiao Liu, Lei Meng
We investigate this approach under two different settings: one where the policy model is smaller and is paired with a more powerful critic model, and another where a single language model fulfills both roles.
Tasks: Language Modelling, reinforcement-learning, +2 more

Re-evaluating the Memory-balanced Pipeline Parallelism: BPipe
no code implementations • 4 Jan 2024 • Mincong Huang, Chao Wang, Chi Ma, Yineng Zhang, Peng Zhang, Lei Yu
Pipeline parallelism is an essential technique in the training of large-scale Transformer models.
href="/paper/imperio-language-guided-backdoor-attacks-for#code">no code implementations</a> • <span class="author-name-text item-date-pub">2 Jan 2024</span> • <span class="author-span "> <a href="/author/ka-ho-chow">Ka-Ho Chow</a></span>, <span class="author-span "> <a href="/author/wenqi-wei">Wenqi Wei</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">This paper proposes Imperio, which harnesses the language understanding capabilities of NLP models to enrich backdoor attacks.</p> <div class="sota"> </div> <p> <a href="/task/backdoor-attack"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Backdoor Attack</span> </span> </a> <a href="/task/image-classification"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/7a146e71-bbf8-4137-bf25-a3618bd043a0.jpg"> <span>Image Classification</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/imperio-language-guided-backdoor-attacks-for#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/imperio-language-guided-backdoor-attacks-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/imperio-language-guided-backdoor-attacks-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2268990 --> <div class="col-lg-3 item-image-col"> <a href="/paper/rolecraft-glm-advancing-personalized-role"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2401.09432.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/rolecraft-glm-advancing-personalized-role">RoleCraft-GLM: Advancing Personalized Role-Playing in Large Language Models</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/rolecraft-glm-advancing-personalized-role#code">1 code implementation</a> • <span class="author-name-text item-date-pub">17 Dec 2023</span> • <span class="author-span "> <a href="/author/meiling-tao">Meiling Tao</a></span>, <span class="author-span "> <a href="/author/xuechen-liang">Xuechen Liang</a></span>, <span class="author-span "> <a href="/author/tianyu-shi">Tianyu Shi</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/yiting-xie">Yiting 
Xie</a></span> </p> <p class="item-strip-abstract">This study presents RoleCraft-GLM, an innovative framework aimed at enhancing personalized role-playing with Large Language Models (LLMs).</p> <div class="sota"> </div> <p> <a href="/task/language-modelling"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000267-8df06634.jpg"> <span>Language Modelling</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 17</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/rolecraft-glm-advancing-personalized-role" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/rolecraft-glm-advancing-personalized-role#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2377003 --> <div class="col-lg-3 item-image-col"> <a href="/paper/skysense-a-multi-modal-remote-sensing"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2312.10115.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/skysense-a-multi-modal-remote-sensing">SkySense: A Multi-Modal Remote Sensing Foundation Model Towards Universal Interpretation for Earth Observation Imagery</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/skysense-a-multi-modal-remote-sensing#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2024-1"> CVPR 2024 </a> </span> • <span class="author-span "> <a href="/author/xin-guo">Xin Guo</a></span>, 
<span class="author-span "> <a href="/author/jiangwei-lao">Jiangwei Lao</a></span>, <span class="author-span "> <a href="/author/bo-dang">Bo Dang</a></span>, <span class="author-span "> <a href="/author/yingying-zhang">Yingying Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/lixiang-ru">Lixiang Ru</a></span>, <span class="author-span "> <a href="/author/liheng-zhong">Liheng Zhong</a></span>, <span class="author-span "> <a href="/author/ziyuan-huang">Ziyuan Huang</a></span>, <span class="author-span "> <a href="/author/kang-wu">Kang Wu</a></span>, <span class="author-span "> <a href="/author/dingxiang-hu">Dingxiang Hu</a></span>, <span class="author-span "> <a href="/author/huimei-he">Huimei He</a></span>, <span class="author-span "> <a href="/author/jian-wang">Jian Wang</a></span>, <span class="author-span "> <a href="/author/jingdong-chen">Jingdong Chen</a></span>, <span class="author-span "> <a href="/author/ming-yang">Ming Yang</a></span>, <span class="author-span "> <a href="/author/yongjun-zhang">Yongjun Zhang</a></span>, <span class="author-span "> <a href="/author/yansheng-li">Yansheng Li</a></span> </p> <p class="item-strip-abstract">Prior studies on Remote Sensing Foundation Model (RSFM) reveal immense potential towards a generic model for Earth Observation.</p> <div class="sota"> </div> <p> <a href="/task/contrastive-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/a2dca842-c6b6-4209-b2a8-dffeac2ef283.jpg"> <span>Contrastive Learning</span> </span> </a> <a href="/task/earth-observation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Earth Observation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/skysense-a-multi-modal-remote-sensing#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 970</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/skysense-a-multi-modal-remote-sensing" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/skysense-a-multi-modal-remote-sensing#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 
0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2234005 --> <div class="col-lg-3 item-image-col"> <a href="/paper/systematic-word-meta-sense-extension"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2311.13029.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/systematic-word-meta-sense-extension">Systematic word meta-sense extension</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/systematic-word-meta-sense-extension#code">1 code implementation</a> • <span class="author-name-text item-date-pub">21 Nov 2023</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">The meaning of polysemous words often varies in a highly productive yet predictable way.</p> <div class="sota"> </div> <p> <a href="/task/language-modelling"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000267-8df06634.jpg"> <span>Language Modelling</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 0</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/systematic-word-meta-sense-extension" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/systematic-word-meta-sense-extension#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 
14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2205710 --> <div class="col-lg-3 item-image-col"> <a href="/paper/crosszoom-simultaneously-motion-deblurring"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2309.16949.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/crosszoom-simultaneously-motion-deblurring">CrossZoom: Simultaneously Motion Deblurring and Event Super-Resolving</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/crosszoom-simultaneously-motion-deblurring#code">1 code implementation</a> • <span class="author-name-text item-date-pub">29 Sep 2023</span> • <span class="author-span "> <a href="/author/chi-zhang">Chi Zhang</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span "> <a href="/author/mingyuan-lin">Mingyuan Lin</a></span>, <span class="author-span "> <a href="/author/cheng-li">Cheng Li</a></span>, <span class="author-span "> <a href="/author/chu-he">Chu He</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Even though the collaboration between traditional and neuromorphic event cameras brings prosperity to frame-event based vision applications, the performance is still confined by the resolution gap crossing two modalities in both spatial and temporal domains.</p> <div class="sota"> </div> <p> <a href="/task/deblurring"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000014-82fe8b62.jpg"> <span>Deblurring</span> </span> </a> <a href="/task/event-based-vision"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/6eed1778-18b3-4c03-905a-c0fb68947541.jpg"> <span>Event-based vision</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 1</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/crosszoom-simultaneously-motion-deblurring" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 
64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/crosszoom-simultaneously-motion-deblurring#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-parallax-for-stereo-event-based"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2309.09513.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-parallax-for-stereo-event-based">Learning Parallax for Stereo Event-based Motion Deblurring</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/learning-parallax-for-stereo-event-based#code">no code implementations</a> • <span class="author-name-text item-date-pub">18 Sep 2023</span> • <span class="author-span "> <a href="/author/mingyuan-lin">Mingyuan Lin</a></span>, <span class="author-span "> <a href="/author/chi-zhang">Chi Zhang</a></span>, <span class="author-span "> <a href="/author/chu-he">Chu He</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">To tackle this problem, we propose a novel coarse-to-fine framework, named NETwork of Event-based motion Deblurring with STereo event and intensity cameras (St-EDNet), to recover high-quality images directly from the misaligned inputs, consisting of a single blurry image and the concurrent event streams.</p> <div class="sota"> </div> <p> <a href="/task/deblurring"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000014-82fe8b62.jpg"> <span>Deblurring</span> </span> </a> <a href="/task/stereo-matching-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Stereo Matching</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-parallax-for-stereo-event-based" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg 
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-parallax-for-stereo-event-based#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2196441 --> <div class="col-lg-3 item-image-col"> <a href="/paper/circle-feature-graphormer-can-circle-features"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2309.06574.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/circle-feature-graphormer-can-circle-features">Circle Feature Graphormer: Can Circle Features Stimulate Graph Transformer?</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/circle-feature-graphormer-can-circle-features#code">1 code implementation</a> • <span class="author-name-text item-date-pub">11 Sep 2023</span> • <span class="author-span "> <a href="/author/jingsong-lv">Jingsong Lv</a></span>, <span class="author-span "> <a href="/author/hongyang-chen">Hongyang Chen</a></span>, <span class="author-span "> <a href="/author/yao-qi">Yao Qi</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Firstly, we define the first circle feature as modified swing for common graph, which comes from bipartite graph.</p> <div class="sota"> <p> <a href="/sota/link-property-prediction-on-ogbl-citation2"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/link-property-prediction-on-ogbl-citation2-small_7f3246d9.png"/> </a> Ranked #1 on <a class="sota-task" href="/sota/link-property-prediction-on-ogbl-citation2"> Link Property Prediction on ogbl-citation2 </a> </p> </div> <p> <a href="/task/link-prediction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000031-326cd034.jpg"> <span>Link Prediction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 2</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/circle-feature-graphormer-can-circle-features" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 
1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/circle-feature-graphormer-can-circle-features#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/llama-reviewer-advancing-code-review"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2308.11148.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/llama-reviewer-advancing-code-review">LLaMA-Reviewer: Advancing Code Review Automation with Large Language Models through Parameter-Efficient Fine-Tuning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/llama-reviewer-advancing-code-review#code">no code implementations</a> • <span class="author-name-text item-date-pub">22 Aug 2023</span> • <span class="author-span "> <a href="/author/junyi-lu">Junyi Lu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/xiaojia-li">Xiaojia Li</a></span>, <span class="author-span "> <a href="/author/li-yang">Li Yang</a></span>, <span class="author-span "> <a href="/author/chun-zuo">Chun Zuo</a></span> </p> <p class="item-strip-abstract">In response to this research gap, we present LLaMA-Reviewer, an innovative framework that leverages the capabilities of LLaMA, a popular LLM, in the realm of code review.</p> <div class="sota"> </div> <p> <a href="/task/parameter-efficient-fine-tuning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>parameter-efficient fine-tuning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/llama-reviewer-advancing-code-review" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 
0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/llama-reviewer-advancing-code-review#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2178581 --> <div class="col-lg-3 item-image-col"> <a href="/paper/generalizing-event-based-motion-deblurring-in"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/ad2cd467-0e6a-4820-8fa0-28dc8617cae8.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/generalizing-event-based-motion-deblurring-in">Generalizing Event-Based Motion Deblurring in Real-World Scenarios</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/generalizing-event-based-motion-deblurring-in#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/iccv-2023-1"> ICCV 2023 </a> </span> • <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/jianzhuang-liu">Jianzhuang Liu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">Event-based motion deblurring has shown promising results by exploiting low-latency events.</p> <div class="sota"> </div> <p> <a href="/task/deblurring"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000014-82fe8b62.jpg"> <span>Deblurring</span> </span> </a> <a href="/task/self-supervised-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001882-b4b42454.jpg"> <span>Self-Supervised Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 32</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/generalizing-event-based-motion-deblurring-in" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 
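Several entries above (CrossZoom, St-EDNet, and the ICCV 2023 deblurring work) pair a blurry frame with event-camera streams. As background only, here is a minimal NumPy sketch of the classical event-based double integral (EDI) relation that this line of work builds on; the event tuple layout, the contrast threshold `c`, and the number of sampling steps are illustrative assumptions, not the method of any paper listed here.

```python
import numpy as np

# Illustrative only: generic event-camera relations, not any listed paper's
# method. Events are assumed to be (t, x, y, polarity) tuples with polarity
# in {+1, -1}; `c` is a hypothetical sensor contrast threshold.

def accumulate_events(events, t0, t1, shape, c=0.2):
    """Sum signed event polarities per pixel over [t0, t1).

    Returns c * E(x, y), approximating the log-intensity change per pixel.
    """
    E = np.zeros(shape, dtype=np.float32)
    for t, x, y, p in events:
        if t0 <= t < t1:
            E[y, x] += p
    return c * E

def latent_from_blurry(blurry, events, t_ref, t_exp, shape, steps=16):
    """EDI-style inversion: the blurry frame B is the temporal average of
    L(t_ref) * exp(c * E(t_ref -> t)) over the exposure, so the latent sharp
    frame is B divided by the time-averaged exponentiated event integral."""
    ts = np.linspace(t_ref, t_ref + t_exp, steps)
    avg = np.zeros(shape, dtype=np.float32)
    for t in ts:
        avg += np.exp(accumulate_events(events, t_ref, t, shape))
    avg /= steps
    return blurry / np.maximum(avg, 1e-6)
```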
1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/generalizing-event-based-motion-deblurring-in#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/video-frame-interpolation-with-stereo-event"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2307.08228.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/video-frame-interpolation-with-stereo-event">Video Frame Interpolation with Stereo Event and Intensity Camera</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/video-frame-interpolation-with-stereo-event#code">no code implementations</a> • <span class="author-name-text item-date-pub">17 Jul 2023</span> • <span class="author-span "> <a href="/author/chao-ding">Chao Ding</a></span>, <span class="author-span "> <a href="/author/mingyuan-lin">Mingyuan Lin</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span "> <a href="/author/jianzhuang-liu">Jianzhuang Liu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">The stereo event-intensity camera setup is widely applied to leverage the advantages of both event cameras with low latency and intensity cameras that capture accurate brightness and texture information.</p> <div class="sota"> </div> <p> <a href="/task/disparity-estimation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Disparity Estimation</span> </span> </a> <a href="/task/optical-flow-estimation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/7d284309-a745-4a6b-a71d-a81a6ec8f7fa.jpg"> <span>Optical Flow Estimation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/video-frame-interpolation-with-stereo-event#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/video-frame-interpolation-with-stereo-event" class="badge badge-light "> <span class=" 
icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/video-frame-interpolation-with-stereo-event#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2150778 --> <div class="col-lg-3 item-image-col"> <a href="/paper/word-sense-extension"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2306.05609.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/word-sense-extension">Word sense extension</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/word-sense-extension#code">1 code implementation</a> • <span class="author-name-text item-date-pub">9 Jun 2023</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/yang-xu">Yang Xu</a></span> </p> <p class="item-strip-abstract">Humans often make creative use of words to express novel senses.</p> <div class="sota"> </div> <p> <a href="/task/language-modelling"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000267-8df06634.jpg"> <span>Language Modelling</span> </span> </a> <a href="/task/word-sense-disambiguation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Word Sense Disambiguation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 6</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/word-sense-extension" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/word-sense-extension#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 
261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2157058 --> <div class="col-lg-3 item-image-col"> <a href="/paper/seeing-electric-network-frequency-from-events"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/6f8bfc80-cc9e-4d4b-b6b7-9961810fac12.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/seeing-electric-network-frequency-from-events">"Seeing'' Electric Network Frequency from Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/seeing-electric-network-frequency-from-events#code">1 code implementation</a> • <span class="author-name-text item-date-pub">4 May 2023</span> • <span class="author-span "> <a href="/author/lexuan-xu">Lexuan Xu</a></span>, <span class="author-span "> <a href="/author/guang-hua">Guang Hua</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/ning-qiao">Ning Qiao</a></span> </p> <p class="item-strip-abstract">Most of the artificial lights fluctuate in response to the grid's alternating current and exhibit subtle variations in terms of both intensity and spectrum, providing the potential to estimate the Electric Network Frequency (ENF) from conventional frame-based videos.</p> <div class="sota"> </div> <p> <a href="/task/enf-electric-network-frequency-extraction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>ENF (Electric Network Frequency) Extraction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 9</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/seeing-electric-network-frequency-from-events" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 
0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/seeing-electric-network-frequency-from-events#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2121736 --> <div class="col-lg-3 item-image-col"> <a href="/paper/dynamic-coarse-to-fine-learning-for-oriented"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/71ddf40d-c4da-4743-b0d1-fde5c861d13b.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/dynamic-coarse-to-fine-learning-for-oriented">Dynamic Coarse-to-Fine Learning for Oriented Tiny Object Detection</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/dynamic-coarse-to-fine-learning-for-oriented#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2023-1"> CVPR 2023 </a> </span> • <span class="author-span "> <a href="/author/chang-xu">Chang Xu</a></span>, <span class="author-span "> <a href="/author/jian-ding">Jian Ding</a></span>, <span class="author-span "> <a href="/author/jinwang-wang">Jinwang Wang</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/huai-yu">Huai Yu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">Despite the exploration of adaptive label assignment in recent oriented object detectors, the extreme geometry shape and limited feature of oriented tiny objects still induce severe mismatch and imbalance issues.</p> <div class="sota"> <p> <a href="/sota/oriented-object-detection-on-dota-2-0"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/oriented-object-detection-on-dota-2-0-small_c2355e34.png"/> </a> Ranked #4 on <a href="/sota/oriented-object-detection-on-dota-2-0"> Oriented Object Detection on DOTA 2.0 </a> </p> </div> <p> <a href="/task/object-detection-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>object-detection</span> </span> </a> <a href="/task/object-detection"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/thumbnails/task/b72ce63a-6ec5-4dc1-9d9c-f3a5c7d9629c.jpg"> <span>Object Detection</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/dynamic-coarse-to-fine-learning-for-oriented#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 106</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/dynamic-coarse-to-fine-learning-for-oriented" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/dynamic-coarse-to-fine-learning-for-oriented#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/dipnet-efficiency-distillation-and-iterative"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2304.07018.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/dipnet-efficiency-distillation-and-iterative">DIPNet: Efficiency Distillation and Iterative Pruning for Image Super-Resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/dipnet-efficiency-distillation-and-iterative#code">no code implementations</a> • <span class="author-name-text item-date-pub">14 Apr 2023</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/xinpeng-li">Xinpeng Li</a></span>, <span class="author-span "> <a href="/author/youwei-li">Youwei Li</a></span>, 
<span class="author-span "> <a href="/author/ting-jiang">Ting Jiang</a></span>, <span class="author-span "> <a href="/author/qi-wu">Qi Wu</a></span>, <span class="author-span "> <a href="/author/haoqiang-fan">Haoqiang Fan</a></span>, <span class="author-span "> <a href="/author/shuaicheng-liu">Shuaicheng Liu</a></span> </p> <p class="item-strip-abstract">To address this issue, we propose a novel multi-stage lightweight network boosting method, which can enable lightweight networks to achieve outstanding performance.</p> <div class="sota"> </div> <p> <a href="/task/image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/87198f6e-219c-4651-ba73-c17f794d9a7e.jpg"> <span>Image Super-Resolution</span> </span> </a> <a href="/task/network-pruning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Network Pruning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/dipnet-efficiency-distillation-and-iterative" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/dipnet-efficiency-distillation-and-iterative#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/self-supervised-scene-dynamic-recovery-from"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2304.06930.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/self-supervised-scene-dynamic-recovery-from">Self-Supervised Scene Dynamic Recovery from Rolling Shutter Images and Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/self-supervised-scene-dynamic-recovery-from#code">no code implementations</a> • <span class="author-name-text item-date-pub">14 Apr 2023</span> • <span class="author-span "> <a href="/author/yangguang-wang">Yangguang Wang</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span "> <a href="/author/mingyuan-lin">Mingyuan Lin</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/boxin-shi">Boxin Shi</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p 
class="item-strip-abstract">Scene Dynamic Recovery (SDR) by inverting distorted Rolling Shutter (RS) images to an undistorted high frame-rate Global Shutter (GS) video is a severely ill-posed problem due to the missing temporal dynamic information in both RS intra-frame scanlines and inter-frame exposures, particularly when prior knowledge about camera/object motions is unavailable.</p> <div class="sota"> </div> <p> <a href="/task/self-supervised-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001882-b4b42454.jpg"> <span>Self-Supervised Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/self-supervised-scene-dynamic-recovery-from" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/self-supervised-scene-dynamic-recovery-from#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/recovering-continuous-scene-dynamics-from-a"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2304.02695.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/recovering-continuous-scene-dynamics-from-a">Recovering Continuous Scene Dynamics from A Single Blurry Image with Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/recovering-continuous-scene-dynamics-from-a#code">no code implementations</a> • <span class="author-name-text item-date-pub">5 Apr 2023</span> • <span class="author-span "> <a href="/author/zhangyi-cheng">Zhangyi Cheng</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/jianzhuang-liu">Jianzhuang Liu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">This paper aims at demystifying a single motion-blurred image with events and revealing temporally continuous scene dynamics encrypted behind motion blurs.</p> <div class="sota"> </div> <p> <a href="/task/image-restoration"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Restoration</span> </span> </a> <a href="/task/ssim"> <span 
class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>SSIM</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/recovering-continuous-scene-dynamics-from-a" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/recovering-continuous-scene-dynamics-from-a#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2095978 --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-to-super-resolve-blurry-images-with"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2302.13766.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-to-super-resolve-blurry-images-with">Learning to Super-Resolve Blurry Images with Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/learning-to-super-resolve-blurry-images-with#code">1 code implementation</a> • <span class="author-name-text item-date-pub">27 Feb 2023</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/bishan-wang">Bishan Wang</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/jianzhuang-liu">Jianzhuang Liu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">Super-Resolution from a single motion Blurred image (SRB) is a severely ill-posed problem due to the joint degradation of motion blurs and low spatial resolution.</p> <div class="sota"> </div> <p> <a href="/task/sparse-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Sparse Learning</span> </span> </a> <a href="/task/super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000032-0f0cf3b2.jpg"> <span>Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 
512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 16</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-to-super-resolve-blurry-images-with" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-to-super-resolve-blurry-images-with#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2090040 --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-reparameterized-discrete-diffusion-model"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2302.05737.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-reparameterized-discrete-diffusion-model">A Reparameterized Discrete Diffusion Model for Text Generation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-reparameterized-discrete-diffusion-model#code">1 code implementation</a> • <span class="author-name-text item-date-pub">11 Feb 2023</span> • <span class="author-span "> <a href="/author/lin-zheng">Lin Zheng</a></span>, <span class="author-span "> <a href="/author/jianbo-yuan">Jianbo Yuan</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/lingpeng-kong">Lingpeng Kong</a></span> </p> <p class="item-strip-abstract">This work studies discrete diffusion probabilistic models with applications to natural language generation.</p> <div class="sota"> </div> <p> <a href="/task/text-generation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/2a04ff75-1032-4f9f-911b-f7198faeb6f5.jpg"> <span>Text Generation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge 
badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 91</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-reparameterized-discrete-diffusion-model" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-reparameterized-discrete-diffusion-model#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-comparison-of-decision-forest-inference"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2302.04430.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-comparison-of-decision-forest-inference">A Comparison of Decision Forest Inference Platforms from A Database Perspective</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-comparison-of-decision-forest-inference#code">no code implementations</a> • <span class="author-name-text item-date-pub">9 Feb 2023</span> • <span class="author-span "> <a href="/author/hong-guan">Hong Guan</a></span>, <span class="author-span "> <a href="/author/mahidhar-reddy-dwarampudi">Mahidhar Reddy Dwarampudi</a></span>, <span class="author-span "> <a href="/author/venkatesh-gunda">Venkatesh Gunda</a></span>, <span class="author-span "> <a href="/author/hong-min">Hong Min</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/jia-zou">Jia Zou</a></span> </p> <p class="item-strip-abstract">In addition, the relation-centric representation we proposed significantly improved netsDB's performance in handling large-scale 
models, while the model reuse optimization we proposed further improved netsDB's performance in handling small-scale datasets.</p> <div class="sota"> </div> <p> <a href="/task/fraud-detection"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Fraud Detection</span> </span> </a> <a href="/task/management"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Management</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-comparison-of-decision-forest-inference" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-comparison-of-decision-forest-inference#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/high-level-semantic-feature-matters-few-shot"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2301.01956.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/high-level-semantic-feature-matters-few-shot">High-level semantic feature matters few-shot unsupervised domain adaptation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/high-level-semantic-feature-matters-few-shot#code">no code implementations</a> • <span class="author-name-text item-date-pub">5 Jan 2023</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wanqi-yang">Wanqi Yang</a></span>, <span class="author-span "> <a href="/author/shengqi-huang">Shengqi Huang</a></span>, <span class="author-span "> <a href="/author/lei-wang">Lei Wang</a></span>, <span class="author-span "> <a href="/author/ming-yang">Ming Yang</a></span> </p> <p class="item-strip-abstract">However, the goal of FS-UDA and FSL are relevant yet distinct, since FS-UDA aims to classify the samples in target domain rather than source domain.</p> <div class="sota"> </div> <p> <a href="/task/few-shot-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Few-Shot Learning</span> </span> </a> <a href="/task/unsupervised-domain-adaptation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Unsupervised Domain Adaptation</span> </span> </a> <a style="position: relative; top: -2px;" 
href="/paper/high-level-semantic-feature-matters-few-shot#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/high-level-semantic-feature-matters-few-shot" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/high-level-semantic-feature-matters-few-shot#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2135306 --> <div class="col-lg-3 item-image-col"> <a href="/paper/seeing-electric-network-frequency-from-events-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1210278.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/seeing-electric-network-frequency-from-events-1">"Seeing" Electric Network Frequency From Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/seeing-electric-network-frequency-from-events-1#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2023-1"> CVPR 2023 </a> </span> • <span class="author-span "> <a href="/author/lexuan-xu">Lexuan Xu</a></span>, <span class="author-span "> <a href="/author/guang-hua">Guang Hua</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/ning-qiao">Ning Qiao</a></span> </p> <p class="item-strip-abstract">Most of the artificial lights fluctuate in response to the grid's alternating current and exhibit subtle variations in terms of both intensity and spectrum, providing the potential to estimate the Electric Network Frequency (ENF) from conventional frame-based videos.</p> <div class="sota"> </div> <p> <a href="/task/enf-electric-network-frequency-extraction"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>ENF (Electric Network Frequency) Extraction</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 
/paper/seeing-electric-network-frequency-from-events-1: Paper · Code (★ 9)

Learning from Training Dynamics: Identifying Mislabeled Data Beyond Manually Designed Features
1 code implementation • 19 Dec 2022 • Qingrui Jia, Xuhong LI, Lei Yu, Jiang Bian, Penghao Zhao, Shupeng Li, Haoyi Xiong, Dejing Dou
While mislabeled or ambiguously-labeled samples in the training set could negatively affect the performance of deep models, diagnosing the dataset and identifying mislabeled samples helps to improve the generalization power.
Paper · Code (★ 5)
A Natural Bias for Language Generation Models
no code implementations • 19 Dec 2022 • Clara Meister, Wojciech Stokowiec, Tiago Pimentel, Lei Yu, Laura Rimell, Adhiguna Kuncoro
After just a few hundred training updates, a standard probabilistic model for language generation has likely not yet learnt many semantic or syntactic rules of natural language, making it difficult to estimate the probability distribution over next tokens.
Tasks: Machine Translation, Text Generation
Paper · Add Code
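One simple way to give a generation model the kind of frequency bias the title alludes to is to initialize the output layer's bias to the log-unigram distribution of the training corpus, so the untrained model already predicts tokens at their empirical rates. The sketch below is illustrative only; the function, smoothing constant, and toy counts are assumptions, not details taken from the paper:

```python
import torch
import torch.nn as nn

def unigram_bias_init(output_layer: nn.Linear, token_counts: torch.Tensor,
                      smoothing: float = 1.0) -> None:
    """Set the softmax layer's bias to log unigram probabilities, so logits
    reflect corpus token frequencies before any gradient step is taken."""
    counts = token_counts + smoothing            # avoid log(0) for unseen tokens
    probs = counts / counts.sum()
    with torch.no_grad():
        output_layer.bias.copy_(probs.log())

# Toy usage: a 5-token vocabulary with made-up counts.
layer = nn.Linear(512, 5)
unigram_bias_init(layer, torch.tensor([900.0, 50.0, 30.0, 15.0, 5.0]))
```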
Biomedical image analysis competitions: The state of current participation practice
no code implementations • 16 Dec 2022 • Matthias Eisenmann, Annika Reinke, Vivienn Weru, Minu Dietlinde Tizabi, Fabian Isensee, Tim J. Adler, Patrick Godau, Veronika Cheplygina, Michal Kozubek, Sharib Ali, Anubha Gupta, Jan Kybic, Alison Noble, Carlos Ortiz de Solórzano, Samiksha Pachade, Caroline Petitjean, Daniel Sage, Donglai Wei, Elizabeth Wilden, Deepak Alapatt, Vincent Andrearczyk, Ujjwal Baid, Spyridon Bakas, Niranjan Balu, Sophia Bano, Vivek Singh Bawa, Jorge Bernal, Sebastian Bodenstedt, Alessandro Casella, Jinwook Choi, Olivier Commowick, Marie Daum, Adrien Depeursinge, Reuben Dorent, Jan Egger, Hannah Eichhorn, Sandy Engelhardt, Melanie Ganz, Gabriel Girard, Lasse Hansen, Mattias Heinrich, Nicholas Heller, Alessa Hering, Arnaud Huaulmé, Hyunjeong Kim, Bennett Landman, Hongwei Bran Li, Jianning Li, Jun Ma, Anne Martel, Carlos Martín-Isla, Bjoern Menze, Chinedu Innocent Nwoye, Valentin Oreiller, Nicolas Padoy, Sarthak Pati, Kelly Payette, Carole Sudre, Kimberlin Van Wijnen, Armine Vardazaryan, Tom Vercauteren, Martin Wagner, Chuanbo Wang, Moi Hoon Yap, Zeyun Yu, Chun Yuan, Maximilian Zenk, Aneeq Zia, David Zimmerer, Rina Bao, Chanyeol Choi, Andrew Cohen, Oleh Dzyubachyk, Adrian Galdran, Tianyuan Gan, Tianqi Guo, Pradyumna Gupta, Mahmood Haithami, Edward Ho, Ikbeom Jang, Zhili Li, Zhengbo Luo, Filip Lux, Sokratis Makrogiannis, Dominik Müller, Young-tack Oh, Subeen Pang, Constantin Pape, Gorkem Polat, Charlotte Rosalie Reed, Kanghyun Ryu, Tim Scherr, Vajira Thambawita, Haoyu Wang, Xinliang Wang, Kele Xu, Hung Yeh, Doyeob Yeo, Yixuan Yuan, Yan Zeng, Xin Zhao, Julian Abbing, Jannes Adam, Nagesh Adluru, Niklas Agethen, Salman Ahmed, Yasmina Al Khalil, Mireia Alenyà, Esa Alhoniemi, Chengyang An, Talha Anwar, Tewodros Weldebirhan Arega, Netanell Avisdris, Dogu Baran Aydogan, Yingbin Bai, Maria Baldeon Calisto, Berke Doga Basaran, Marcel Beetz, Cheng Bian, Hao Bian, Kevin Blansit, Louise Bloch, Robert Bohnsack, Sara Bosticardo, Jack Breen, Mikael Brudfors, Raphael Brüngel, Mariano Cabezas, Alberto Cacciola, Zhiwei Chen, Yucong Chen, Daniel Tianming Chen, Minjeong Cho, Min-Kook Choi, Chuantao Xie Chuantao Xie, Dana Cobzas, Julien Cohen-Adad, Jorge Corral Acero, Sujit Kumar Das, Marcela de Oliveira, Hanqiu Deng, Guiming Dong, Lars Doorenbos, Cory Efird, Sergio Escalera, Di Fan, Mehdi Fatan Serj, Alexandre Fenneteau, Lucas Fidon, Patryk Filipiak, René Finzel, Nuno R. Freitas, Christoph M. Friedrich, Mitchell Fulton, Finn Gaida, Francesco Galati, Christoforos Galazis, Chang Hee Gan, Zheyao Gao, Shengbo Gao, Matej Gazda, Beerend Gerats, Neil Getty, Adam Gibicar, Ryan Gifford, Sajan Gohil, Maria Grammatikopoulou, Daniel Grzech, Orhun Güley, Timo Günnemann, Chunxu Guo, Sylvain Guy, Heonjin Ha, Luyi Han, Il Song Han, Ali Hatamizadeh, Tian He, Jimin Heo, Sebastian Hitziger, SeulGi Hong, Seungbum Hong, Rian Huang, Ziyan Huang, Markus Huellebrand, Stephan Huschauer, Mustaffa Hussain, Tomoo Inubushi, Ece Isik Polat, Mojtaba Jafaritadi, SeongHun Jeong, Bailiang Jian, Yuanhong Jiang, Zhifan Jiang, Yueming Jin, Smriti Joshi, Abdolrahim Kadkhodamohammadi, Reda Abdellah Kamraoui, Inha Kang, Junghwa Kang, Davood Karimi, April Khademi, Muhammad Irfan Khan, Suleiman A. Khan, Rishab Khantwal, Kwang-Ju Kim, Timothy Kline, Satoshi Kondo, Elina Kontio, Adrian Krenzer, Artem Kroviakov, Hugo Kuijf, Satyadwyoom Kumar, Francesco La Rosa, Abhi Lad, Doohee Lee, Minho Lee, Chiara Lena, Hao Li, Ling Li, Xingyu Li, Fuyuan Liao, Kuanlun Liao, Arlindo Limede Oliveira, Chaonan Lin, Shan Lin, Akis Linardos, Marius George Linguraru, Han Liu, Tao Liu, Di Liu, Yanling Liu, João Lourenço-Silva, Jingpei Lu, Jiangshan Lu, Imanol Luengo, Christina B. Lund, Huan Minh Luu, Yi Lv, Uzay Macar, Leon Maechler, Sina Mansour L., Kenji Marshall, Moona Mazher, Richard McKinley, Alfonso Medela, Felix Meissen, Mingyuan Meng, Dylan Miller, Seyed Hossein Mirjahanmardi, Arnab Mishra, Samir Mitha, Hassan Mohy-ud-Din, Tony Chi Wing Mok, Gowtham Krishnan Murugesan, Enamundram Naga Karthik, Sahil Nalawade, Jakub Nalepa, Mohamed Naser, Ramin Nateghi, Hammad Naveed, Quang-Minh Nguyen, Cuong Nguyen Quoc, Brennan Nichyporuk, Bruno Oliveira, David Owen, Jimut Bahan Pal, Junwen Pan, Wentao Pan, Winnie Pang, Bogyu Park, Vivek Pawar, Kamlesh Pawar, Michael Peven, Lena Philipp, Tomasz Pieciak, Szymon Plotka, Marcel Plutat, Fattaneh Pourakpour, Domen Preložnik, Kumaradevan Punithakumar, Abdul Qayyum, Sandro Queirós, Arman Rahmim, Salar Razavi, Jintao Ren, Mina Rezaei, Jonathan Adam Rico, ZunHyan Rieu, Markus Rink, Johannes Roth, Yusely Ruiz-Gonzalez, Numan Saeed, Anindo Saha, Mostafa Salem, Ricardo Sanchez-Matilla, Kurt Schilling, Wei Shao, Zhiqiang Shen, Ruize Shi, Pengcheng Shi, Daniel Sobotka, Théodore Soulier, Bella Specktor Fadida, Danail Stoyanov, Timothy Sum Hon Mun, Xiaowu Sun, Rong Tao, Franz Thaler, Antoine Théberge, Felix Thielke, Helena Torres, Kareem A. Wahid, Jiacheng Wang, Yifei Wang, Wei Wang, Xiong Wang, Jianhui Wen, Ning Wen, Marek Wodzinski, Ye Wu, Fangfang Xia, Tianqi Xiang, Chen Xiaofei, Lizhan Xu, Tingting Xue, Yuxuan Yang, Lin Yang, Kai Yao, Huifeng Yao, Amirsaeed Yazdani, Michael Yip, Hwanseung Yoo, Fereshteh Yousefirizi, Shunkai Yu, Lei Yu, Jonathan Zamora, Ramy Ashraf Zeineldin, Dewen Zeng, Jianpeng Zhang, Bokai Zhang, Jiapeng Zhang, Fan Zhang, Huahong Zhang, Zhongchen Zhao, Zixuan Zhao, Jiachen Zhao, Can Zhao, Qingshuo Zheng, Yuheng Zhi, Ziqi Zhou, Baosheng Zou, Klaus Maier-Hein, Paul F. Jäger, Annette Kopp-Schneider, Lena Maier-Hein
Of these, 84% were based on standard architectures.
Tasks: Benchmarking, Survey
Paper · Add Code

GT-CausIn: a novel causal-based insight for traffic prediction
no code implementations • 12 Dec 2022 • Ting Gao, Rodrigo Kappes Marques, Lei Yu
We then present a novel model named Graph Spatial-Temporal Network Based on Causal Insight (GT-CausIn), where prior learned causal information is integrated with graph diffusion layers and temporal convolutional network (TCN) layers.
Tasks: Traffic Prediction
Paper · Add Code
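The GT-CausIn abstract names two standard building blocks: graph diffusion layers and temporal convolutional network (TCN) layers. The paper's exact layer definitions are not given here, so the following is a generic sketch of the TCN ingredient only, with illustrative class names and sizes: a dilated causal 1-D convolution whose stacked dilations give the long temporal receptive field traffic series need.

```python
import torch
import torch.nn as nn

class CausalConv1d(nn.Module):
    """Dilated causal 1-D convolution: the output at time t depends only on
    inputs up to t (symmetric padding is added, then the lookahead tail is
    trimmed off)."""
    def __init__(self, channels: int, kernel_size: int = 3, dilation: int = 1):
        super().__init__()
        self.trim = (kernel_size - 1) * dilation
        self.conv = nn.Conv1d(channels, channels, kernel_size,
                              padding=self.trim, dilation=dilation)

    def forward(self, x: torch.Tensor) -> torch.Tensor:  # x: (batch, channels, time)
        out = torch.relu(self.conv(x))
        return out[..., :-self.trim] if self.trim else out

# Doubling the dilation per layer grows the receptive field exponentially,
# which is what lets a TCN cover long traffic histories cheaply.
tcn = nn.Sequential(*[CausalConv1d(32, dilation=2 ** i) for i in range(4)])
out = tcn(torch.randn(8, 32, 288))   # e.g. one day of 5-minute sensor readings
```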
style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/gt-causin-a-novel-causal-based-insight-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/gt-causin-a-novel-causal-based-insight-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-to-see-through-with-events"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2212.02219.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-to-see-through-with-events">Learning to See Through with Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/learning-to-see-through-with-events#code">no code implementations</a> • <span class="author-name-text item-date-pub">5 Dec 2022</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span "> <a href="/author/wei-liao">Wei Liao</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">Although synthetic aperture imaging (SAI) can achieve the seeing-through effect by blurring out off-focus foreground occlusions while recovering in-focus occluded scenes from multi-view images, its performance is often deteriorated by dense occlusions and extreme lighting conditions.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-to-see-through-with-events" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-to-see-through-with-events#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" 
Detecting Line Segments in Motion-blurred Images with Events
1 code implementation • 14 Nov 2022 • Huai Yu, Hao Li, Wen Yang, Lei Yu, Gui-Song Xia
To robustly detect line segments over motion blurs, we propose to leverage the complementary information of images and events.
Tasks: 3D Reconstruction, Line Segment Detection (+1 more)
Paper · Code (★ 50)
Efficient and Accurate Quantized Image Super-Resolution on Mobile NPUs, Mobile AI & AIM 2022 challenge: Report
2 code implementations • 7 Nov 2022 • Andrey Ignatov, Radu Timofte, Maurizio Denna, Abdel Younes, Ganzorig Gankhuyag, Jingang Huh, Myeong Kyun Kim, Kihwan Yoon, Hyeon-Cheol Moon, Seungho Lee, Yoonsik Choe, Jinwoo Jeong, Sungjei Kim, Maciej Smyl, Tomasz Latkowski, Pawel Kubik, Michal Sokolski, Yujie Ma, Jiahao Chao, Zhou Zhou, Hongfan Gao, Zhengfeng Yang, Zhenbing Zeng, Zhengyang Zhuge, Chenghua Li, Dan Zhu, Mengdi Sun, Ran Duan, Yan Gao, Lingshun Kong, Long Sun, Xiang Li, Xingdong Zhang, Jiawei Zhang, Yaqi Wu, Jinshan Pan, Gaocheng Yu, Jin Zhang, Feng Zhang, Zhe Ma, Hongbin Wang, Hojin Cho, Steve Kim, Huaen Li, Yanbo Ma, Ziwei Luo, Youwei Li, Lei Yu, Zhihong Wen, Qi Wu, Haoqiang Fan, Shuaicheng Liu, Lize Zhang, Zhikai Zong, Jeremy Kwon, Junxi Zhang, Mengyuan Li, Nianxiang Fu, Guanchen Ding, Han Zhu, Zhenzhong Chen, Gen Li, Yuanfan Zhang, Lei Sun, Dafeng Zhang, Neo Yang, Fitz Liu, Jerry Zhao, Mustafa Ayazoglu, Bahri Batuhan Bilecen, Shota Hirose, Kasidis Arunruangsirilert, Luo Ao, Ho Chun Leung, Andrew Wei, Jie Liu, Qiang Liu, Dahai Yu, Ao Li, Lei Luo, Ce Zhu, Seongmin Hong, Dongwon Park, Joonhee Lee, Byeong Hyun Lee, Seunggyu Lee, Se Young Chun, Ruiyuan He, Xuhao Jiang, Haihang Ruan, Xinjian Zhang, Jing Liu, Garas Gendy, Nabil Sabor, Jingchao Hou, Guanghui He
While numerous solutions have been proposed for this problem in the past, they are usually not compatible with low-power mobile NPUs having many computational and memory constraints.
Tasks: Image Super-Resolution
Paper · Code (★ 134)
viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/2211-05910#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2157492 --> <div class="col-lg-3 item-image-col"> <a href="/paper/motion-robust-high-speed-light-weighted"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2208.11602.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/motion-robust-high-speed-light-weighted">Motion Robust High-Speed Light-Weighted Object Detection With Event Camera</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/motion-robust-high-speed-light-weighted#code">1 code implementation</a> • <span class="author-name-text item-date-pub">24 Aug 2022</span> • <span class="author-span "> <a href="/author/bingde-liu">Bingde Liu</a></span>, <span class="author-span "> <a href="/author/chang-xu">Chang Xu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/huai-yu">Huai Yu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">In this work, we propose a motion robust and high-speed detection pipeline which better leverages the event data.</p> <div class="sota"> </div> <p> <a href="/task/data-augmentation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001560-029cbc00.jpg"> <span>Data Augmentation</span> </span> </a> <a href="/task/object-detection-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>object-detection</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/motion-robust-high-speed-light-weighted#tasks"> <span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 
480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 21</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/motion-robust-high-speed-light-weighted" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/motion-robust-high-speed-light-weighted#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2012604 --> <div class="col-lg-3 item-image-col"> <a href="/paper/fast-nearest-convolution-for-real-time"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/d75ca53a-c7e8-4b1b-8746-c2ec2744c953.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/fast-nearest-convolution-for-real-time">Fast Nearest Convolution for Real-Time Efficient Image Super-Resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/fast-nearest-convolution-for-real-time#code">2 code implementations</a> • <span class="author-name-text item-date-pub">24 Aug 2022</span> • <span class="author-span "> <a href="/author/ziwei-luo">Ziwei Luo</a></span>, <span class="author-span "> <a href="/author/youwei-li">Youwei Li</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/qi-wu">Qi Wu</a></span>, <span class="author-span "> <a href="/author/zhihong-wen">Zhihong Wen</a></span>, <span class="author-span "> <a href="/author/haoqiang-fan">Haoqiang Fan</a></span>, <span class="author-span "> <a href="/author/shuaicheng-liu">Shuaicheng Liu</a></span> </p> <p class="item-strip-abstract">The proposed nearest convolution has the same performance as the nearest upsampling but is much faster and more suitable for Android NNAPI.</p> <div class="sota"> </div> <p> <a href="/task/image-super-resolution"> <span class="badge 
badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/87198f6e-219c-4651-ba73-c17f794d9a7e.jpg"> <span>Image Super-Resolution</span> </span> </a> <a href="/task/quantization"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Quantization</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 70</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/fast-nearest-convolution-for-real-time" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/fast-nearest-convolution-for-real-time#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2010176 --> <div class="col-lg-3 item-image-col"> <a href="/paper/rfla-gaussian-receptive-field-based-label"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/2316ee10-cb0f-4b2b-a55d-c8ac0ab62dd8.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/rfla-gaussian-receptive-field-based-label">RFLA: Gaussian Receptive Field based Label Assignment for Tiny Object Detection</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/rfla-gaussian-receptive-field-based-label#code">1 code implementation</a> • <span class="author-name-text item-date-pub">18 Aug 2022</span> • <span class="author-span "> <a href="/author/chang-xu">Chang Xu</a></span>, <span class="author-span "> <a href="/author/jinwang-wang">Jinwang Wang</a></span>, <span class="author-span "> <a 
href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/huai-yu">Huai Yu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">Then, instead of assigning samples with IoU or center sampling strategy, a new Receptive Field Distance (RFD) is proposed to directly measure the similarity between the Gaussian receptive field and ground truth.</p> <div class="sota"> <p> <a href="/sota/object-detection-on-ai-tod"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/object-detection-on-ai-tod-small_c72e34d0.png"/> </a> Ranked #2 on <a href="/sota/object-detection-on-ai-tod"> Object Detection on AI-TOD </a> </p> </div> <p> <a href="/task/object"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Object</span> </span> </a> <a href="/task/object-detection-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>object-detection</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/rfla-gaussian-receptive-field-based-label#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 262</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/rfla-gaussian-receptive-field-based-label" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/rfla-gaussian-receptive-field-based-label#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 
32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/mad-for-robust-reinforcement-learning-in-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2207.08583.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mad-for-robust-reinforcement-learning-in-1">MAD for Robust Reinforcement Learning in Machine Translation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mad-for-robust-reinforcement-learning-in-1#code">no code implementations</a> • <span class="author-name-text item-date-pub">18 Jul 2022</span> • <span class="author-span "> <a href="/author/domenic-donato">Domenic Donato</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wang-ling">Wang Ling</a></span>, <span class="author-span "> <a href="/author/chris-dyer">Chris Dyer</a></span> </p> <p class="item-strip-abstract">We introduce a new distributed policy gradient algorithm and show that it outperforms existing reward-aware training procedures such as REINFORCE, minimum risk training (MRT) and proximal policy optimization (PPO) in terms of training stability and generalization performance when optimizing machine translation models.</p> <div class="sota"> </div> <p> <a href="/task/machine-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000257-2b560008_M7RFnV9.jpg"> <span>Machine Translation</span> </span> </a> <a href="/task/reinforcement-learning-2"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>reinforcement-learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/mad-for-robust-reinforcement-learning-in-1#tasks"> <span class="badge badge-primary"> <b>+4</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mad-for-robust-reinforcement-learning-in-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mad-for-robust-reinforcement-learning-in-1#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 2129211 --> <div class="col-lg-3 item-image-col"> <a href="/paper/detecting-tiny-objects-in-aerial-images-a"> <div class="item-image" style="background-image: 
Detecting tiny objects in aerial images: A normalized Wasserstein distance and a new benchmark
1 code implementation • 28 Jun 2022 • Chang Xu, Jinwang Wang, Wen Yang, Huai Yu, Lei Yu, Gui-Song Xia

Tiny object detection (TOD) in aerial images is challenging since a tiny object only contains a few pixels.

Tasks: Object, object-detection (+1 more)
★ 55 • Paper: /paper/detecting-tiny-objects-in-aerial-images-a • Code: /paper/detecting-tiny-objects-in-aerial-images-a#code
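The title's normalized Wasserstein distance is a similarity for tiny boxes that, unlike IoU, degrades smoothly when boxes barely overlap: model each box as a 2D Gaussian and squash the Wasserstein distance between the Gaussians through an exponential. A sketch under the usual axis-aligned modelling follows; the normalising constant `c` is dataset-dependent, and the value here is an assumption.

```python
import math

def normalized_wasserstein_distance(box_a, box_b, c=12.8):
    """Wasserstein-style similarity between boxes (cx, cy, w, h), each
    modelled as a 2D Gaussian N([cx, cy], diag((w/2)^2, (h/2)^2))."""
    cxa, cya, wa, ha = box_a
    cxb, cyb, wb, hb = box_b
    # Squared 2-Wasserstein distance between the two Gaussians.
    w2_sq = ((cxa - cxb) ** 2 + (cya - cyb) ** 2
             + ((wa - wb) / 2) ** 2 + ((ha - hb) / 2) ** 2)
    # Exponential normalisation maps it into (0, 1], like an IoU score.
    return math.exp(-math.sqrt(w2_sq) / c)
```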
GLF-CR: SAR-Enhanced Cloud Removal with Global-Local Fusion
1 code implementation • 6 Jun 2022 • Fang Xu, Yilei Shi, Patrick Ebel, Lei Yu, Gui-Song Xia, Wen Yang, Xiao Xiang Zhu

The challenge of the cloud removal task can be alleviated with the aid of Synthetic Aperture Radar (SAR) images that can penetrate cloud cover.

Ranked #3 on Cloud Removal on SEN12MS-CR (/sota/cloud-removal-on-sen12ms-cr)
Tasks: Cloud Removal
★ 44 • Paper: /paper/exploring-the-potential-of-sar-data-for-cloud • Code: /paper/exploring-the-potential-of-sar-data-for-cloud#code

Noun2Verb: Probabilistic frame semantics for word class conversion
1 code implementation • 12 May 2022 • Lei Yu, Yang Xu

We present a formal framework, Noun2Verb, that simulates the production and comprehension of novel denominal verb usages by modeling shared knowledge of speaker and listener in semantic frames.

★ 0 • Paper: /paper/noun2verb-probabilistic-frame-semantics-for • Code: /paper/noun2verb-probabilistic-frame-semantics-for#code
icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1964956 --> <div class="col-lg-3 item-image-col"> <a href="/paper/ntire-2022-challenge-on-efficient-super"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2205.05675.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/ntire-2022-challenge-on-efficient-super">NTIRE 2022 Challenge on Efficient Super-Resolution: Methods and Results</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/ntire-2022-challenge-on-efficient-super#code">2 code implementations</a> • <span class="author-name-text item-date-pub">11 May 2022</span> • <span class="author-span "> <a href="/author/yawei-li">Yawei Li</a></span>, <span class="author-span "> <a href="/author/kai-zhang">Kai Zhang</a></span>, <span class="author-span "> <a href="/author/radu-timofte">Radu Timofte</a></span>, <span class="author-span "> <a href="/author/luc-van-gool-1">Luc van Gool</a></span>, <span class="author-span "> <a href="/author/fangyuan-kong">Fangyuan Kong</a></span>, <span class="author-span "> <a href="/author/mingxi-li">Mingxi Li</a></span>, <span class="author-span "> <a href="/author/songwei-liu">Songwei Liu</a></span>, <span class="author-span "> <a href="/author/zongcai-du">Zongcai Du</a></span>, <span class="author-span "> <a href="/author/ding-liu">Ding Liu</a></span>, <span class="author-span "> <a href="/author/chenhui-zhou">Chenhui Zhou</a></span>, <span class="author-span "> <a href="/author/jingyi-chen">Jingyi Chen</a></span>, <span class="author-span "> <a href="/author/qingrui-han">Qingrui Han</a></span>, <span class="author-span "> <a href="/author/zheyuan-li">Zheyuan Li</a></span>, <span class="author-span "> <a href="/author/yingqi-liu">Yingqi Liu</a></span>, <span class="author-span "> <a href="/author/xiangyu-chen">Xiangyu Chen</a></span>, <span class="author-span "> <a href="/author/haoming-cai">Haoming Cai</a></span>, <span class="author-span "> <a href="/author/yu-qiao">Yu Qiao</a></span>, <span class="author-span "> <a href="/author/chao-dong">Chao Dong</a></span>, <span class="author-span "> <a href="/author/long-sun">Long Sun</a></span>, <span class="author-span "> <a href="/author/jinshan-pan">Jinshan Pan</a></span>, <span class="author-span "> <a href="/author/yi-zhu">Yi Zhu</a></span>, <span class="author-span "> <a href="/author/zhikai-zong">Zhikai Zong</a></span>, <span class="author-span "> <a 
href="/author/xiaoxiao-liu">Xiaoxiao Liu</a></span>, <span class="author-span "> <a href="/author/zheng-hui">Zheng Hui</a></span>, <span class="author-span "> <a href="/author/tao-yang">Tao Yang</a></span>, <span class="author-span "> <a href="/author/peiran-ren">Peiran Ren</a></span>, <span class="author-span "> <a href="/author/xuansong-xie">Xuansong Xie</a></span>, <span class="author-span "> <a href="/author/xian-sheng-hua-1">Xian-Sheng Hua</a></span>, <span class="author-span "> <a href="/author/yanbo-wang">Yanbo Wang</a></span>, <span class="author-span "> <a href="/author/xiaozhong-ji">Xiaozhong Ji</a></span>, <span class="author-span "> <a href="/author/chuming-lin">Chuming Lin</a></span>, <span class="author-span "> <a href="/author/donghao-luo">Donghao Luo</a></span>, <span class="author-span "> <a href="/author/ying-tai">Ying Tai</a></span>, <span class="author-span "> <a href="/author/chengjie-wang">Chengjie Wang</a></span>, <span class="author-span "> <a href="/author/zhizhong-zhang">Zhizhong Zhang</a></span>, <span class="author-span "> <a href="/author/yuan-xie">Yuan Xie</a></span>, <span class="author-span "> <a href="/author/shen-cheng">Shen Cheng</a></span>, <span class="author-span "> <a href="/author/ziwei-luo">Ziwei Luo</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/zhihong-wen">Zhihong Wen</a></span>, <span class="author-span "> <a href="/author/qi-wu1">Qi Wu1</a></span>, <span class="author-span "> <a href="/author/youwei-li">Youwei Li</a></span>, <span class="author-span "> <a href="/author/haoqiang-fan">Haoqiang Fan</a></span>, <span class="author-span "> <a href="/author/jian-sun">Jian Sun</a></span>, <span class="author-span "> <a href="/author/shuaicheng-liu">Shuaicheng Liu</a></span>, <span class="author-span "> <a href="/author/yuanfei-huang">Yuanfei Huang</a></span>, <span class="author-span "> <a href="/author/meiguang-jin">Meiguang Jin</a></span>, <span class="author-span "> <a href="/author/hua-huang">Hua Huang</a></span>, <span class="author-span "> <a href="/author/jing-liu">Jing Liu</a></span>, <span class="author-span "> <a href="/author/xinjian-zhang">Xinjian Zhang</a></span>, <span class="author-span "> <a href="/author/yan-wang">Yan Wang</a></span>, <span class="author-span "> <a href="/author/lingshun-long">Lingshun Long</a></span>, <span class="author-span "> <a href="/author/gen-li">Gen Li</a></span>, <span class="author-span "> <a href="/author/yuanfan-zhang">Yuanfan Zhang</a></span>, <span class="author-span "> <a href="/author/zuowei-cao">Zuowei Cao</a></span>, <span class="author-span "> <a href="/author/lei-sun">Lei Sun</a></span>, <span class="author-span "> <a href="/author/panaetov-alexander">Panaetov Alexander</a></span>, <span class="author-span "> <a href="/author/yucong-wang">Yucong Wang</a></span>, <span class="author-span "> <a href="/author/minjie-cai">Minjie Cai</a></span>, <span class="author-span "> <a href="/author/li-wang">Li Wang</a></span>, <span class="author-span "> <a href="/author/lu-tian">Lu Tian</a></span>, <span class="author-span "> <a href="/author/zheyuan-wang">Zheyuan Wang</a></span>, <span class="author-span "> <a href="/author/hongbing-ma">Hongbing Ma</a></span>, <span class="author-span "> <a href="/author/jie-liu">Jie Liu</a></span>, <span class="author-span "> <a href="/author/chao-chen">Chao Chen</a></span>, <span class="author-span "> <a href="/author/yidong-cai">Yidong Cai</a></span>, <span class="author-span "> 
<a href="/author/jie-tang">Jie Tang</a></span>, <span class="author-span "> <a href="/author/gangshan-wu">Gangshan Wu</a></span>, <span class="author-span "> <a href="/author/weiran-wang">Weiran Wang</a></span>, <span class="author-span "> <a href="/author/shirui-huang">Shirui Huang</a></span>, <span class="author-span "> <a href="/author/honglei-lu">Honglei Lu</a></span>, <span class="author-span "> <a href="/author/huan-liu">Huan Liu</a></span>, <span class="author-span "> <a href="/author/keyan-wang">Keyan Wang</a></span>, <span class="author-span "> <a href="/author/jun-chen">Jun Chen</a></span>, <span class="author-span "> <a href="/author/shi-chen">Shi Chen</a></span>, <span class="author-span "> <a href="/author/yuchun-miao">Yuchun Miao</a></span>, <span class="author-span "> <a href="/author/zimo-huang">Zimo Huang</a></span>, <span class="author-span "> <a href="/author/lefei-zhang">Lefei Zhang</a></span>, <span class="author-span "> <a href="/author/mustafa-ayazoglu-1">Mustafa Ayazoğlu</a></span>, <span class="author-span "> <a href="/author/wei-xiong">Wei Xiong</a></span>, <span class="author-span "> <a href="/author/chengyi-xiong">Chengyi Xiong</a></span>, <span class="author-span "> <a href="/author/fei-wang">Fei Wang</a></span>, <span class="author-span "> <a href="/author/hao-li">Hao Li</a></span>, <span class="author-span "> <a href="/author/ruimian-wen">Ruimian Wen</a></span>, <span class="author-span "> <a href="/author/zhijing-yang">Zhijing Yang</a></span>, <span class="author-span "> <a href="/author/wenbin-zou">Wenbin Zou</a></span>, <span class="author-span "> <a href="/author/weixin-zheng">Weixin Zheng</a></span>, <span class="author-span "> <a href="/author/tian-ye">Tian Ye</a></span>, <span class="author-span "> <a href="/author/yuncheng-zhang">Yuncheng Zhang</a></span>, <span class="author-span "> <a href="/author/xiangzhen-kong">Xiangzhen Kong</a></span>, <span class="author-span "> <a href="/author/aditya-arora">Aditya Arora</a></span>, <span class="author-span "> <a href="/author/syed-waqas-zamir">Syed Waqas Zamir</a></span>, <span class="author-span "> <a href="/author/salman-khan">Salman Khan</a></span>, <span class="author-span "> <a href="/author/munawar-hayat">Munawar Hayat</a></span>, <span class="author-span "> <a href="/author/fahad-shahbaz-khan">Fahad Shahbaz Khan</a></span>, <span class="author-span "> <a href="/author/dandan-gaoand-dengwen-zhouand-qian-ning">Dandan Gaoand Dengwen Zhouand Qian Ning</a></span>, <span class="author-span "> <a href="/author/jingzhu-tang">Jingzhu Tang</a></span>, <span class="author-span "> <a href="/author/han-huang">Han Huang</a></span>, <span class="author-span "> <a href="/author/yufei-wang-1">YuFei Wang</a></span>, <span class="author-span "> <a href="/author/zhangheng-peng">Zhangheng Peng</a></span>, <span class="author-span "> <a href="/author/haobo-li">Haobo Li</a></span>, <span class="author-span "> <a href="/author/wenxue-guan">Wenxue Guan</a></span>, <span class="author-span "> <a href="/author/shenghua-gong">Shenghua Gong</a></span>, <span class="author-span "> <a href="/author/xin-li">Xin Li</a></span>, <span class="author-span "> <a href="/author/jun-liu">Jun Liu</a></span>, <span class="author-span "> <a href="/author/wanjun-wang">Wanjun Wang</a></span>, <span class="author-span "> <a href="/author/dengwen-zhou">Dengwen Zhou</a></span>, <span class="author-span "> <a href="/author/kun-zeng">Kun Zeng</a></span>, <span class="author-span "> <a href="/author/hanjiang-lin">Hanjiang Lin</a></span>, <span 
class="author-span "> <a href="/author/xinyu-chen">Xinyu Chen</a></span>, <span class="author-span "> <a href="/author/jinsheng-fang">Jinsheng Fang</a></span> </p> <p class="item-strip-abstract">The aim was to design a network for single image super-resolution that achieved improvement of efficiency measured according to several metrics including runtime, parameters, FLOPs, activations, and memory consumption while at least maintaining the PSNR of 29. 00dB on DIV2K validation set.</p> <div class="sota"> </div> <p> <a href="/task/image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/87198f6e-219c-4651-ba73-c17f794d9a7e.jpg"> <span>Image Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 122</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/ntire-2022-challenge-on-efficient-super" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/ntire-2022-challenge-on-efficient-super#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1959524 --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-to-extract-building-footprints-from"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/d8c74b67-9a70-4a62-8113-a84e2f32d8e3.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-to-extract-building-footprints-from">Learning to Extract Building Footprints from Off-Nadir Aerial Images</a></h1> 
<p class="author-section" style="padding-top:2px"> <a href="/paper/learning-to-extract-building-footprints-from#code">1 code implementation</a> • <span class="author-name-text item-date-pub">28 Apr 2022</span> • <span class="author-span "> <a href="/author/jinwang-wang">Jinwang Wang</a></span>, <span class="author-span "> <a href="/author/lingxuan-meng">Lingxuan Meng</a></span>, <span class="author-span "> <a href="/author/weijia-li">Weijia Li</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">In this paper, we propose an offset vector learning scheme, which turns the building footprint extraction problem in off-nadir images into an instance-level joint prediction problem of the building roof and its corresponding "roof to footprint" offset vector.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 86</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-to-extract-building-footprints-from" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-to-extract-building-footprints-from#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1953002 --> <div class="col-lg-3 item-image-col"> <a href="/paper/bsrt-improving-burst-super-resolution-with"> <div class="item-image" style="background-image: 
BSRT: Improving Burst Super-Resolution with Swin Transformer and Flow-Guided Deformable Alignment
1 code implementation • 18 Apr 2022 • Ziwei Luo, Youwei Li, Shen Cheng, Lei Yu, Qi Wu, Zhihong Wen, Haoqiang Fan, Jian Sun, Shuaicheng Liu

To overcome the challenges in BurstSR, we propose a Burst Super-Resolution Transformer (BSRT), which can significantly improve the capability of extracting inter-frame information and reconstruction.

Ranked #1 on Burst Image Super-Resolution on SyntheticBurst (/sota/burst-image-super-resolution-on)
Tasks: Burst Image Reconstruction, Burst Image Super-Resolution (+2 more)
★ 184 • Paper: /paper/bsrt-improving-burst-super-resolution-with • Code: /paper/bsrt-improving-burst-super-resolution-with#code
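Flow-guided deformable alignment, named in the title, combines a coarse optical-flow displacement with learned residual offsets fed to a deformable convolution. The sketch below shows that composition using torchvision's DeformConv2d; the layer sizes and the way the flow seeds the offsets are illustrative assumptions rather than BSRT's exact design.

```python
import torch
import torch.nn as nn
from torchvision.ops import DeformConv2d

class FlowGuidedAlign(nn.Module):
    """Align a neighbour frame's features to the base frame: optical flow
    gives a coarse per-pixel displacement, and a deformable conv refines it
    with learned residual offsets (a sketch, not BSRT's module)."""
    def __init__(self, channels=64, kernel_size=3):
        super().__init__()
        k2 = kernel_size * kernel_size
        # Predict residual offsets from both feature maps plus the flow.
        self.offset_head = nn.Conv2d(channels * 2 + 2, 2 * k2, 3, padding=1)
        self.dcn = DeformConv2d(channels, channels, kernel_size, padding=1)

    def forward(self, feat_neighbor, feat_base, flow):
        # flow: (N, 2, H, W) coarse displacement of neighbour -> base
        residual = self.offset_head(
            torch.cat([feat_base, feat_neighbor, flow], dim=1))
        # Repeat the flow for every kernel sample, add learned residuals.
        offsets = flow.repeat(1, residual.shape[1] // 2, 1, 1) + residual
        return self.dcn(feat_neighbor, offsets)
```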
Autofocus for Event Cameras
no code implementations • CVPR 2022 • ShiJie Lin, Yinqiang Zhang, Lei Yu, Bin Zhou, Xiaowei Luo, Jia Pan

Focus control (FC) is crucial for cameras to capture sharp images in challenging real-world scenarios.

Paper: /paper/autofocus-for-event-cameras (no code available)
fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1951841 --> <div class="col-lg-3 item-image-col"> <a href="/paper/unifying-motion-deblurring-and-frame"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/b4bcc7a8-c658-4c61-ab1d-eccd78b40486.gif');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/unifying-motion-deblurring-and-frame">Unifying Motion Deblurring and Frame Interpolation with Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/unifying-motion-deblurring-and-frame#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2022-6"> CVPR 2022 </a> </span> • <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Slow shutter speed and long exposure time of frame-based cameras often cause visual blur and loss of inter-frame information, degenerating the overall quality of captured videos.</p> <div class="sota"> </div> <p> <a href="/task/deblurring"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000014-82fe8b62.jpg"> <span>Deblurring</span> </span> </a> <a href="/task/self-supervised-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001882-b4b42454.jpg"> <span>Self-Supervised Learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/unifying-motion-deblurring-and-frame#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 72</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/unifying-motion-deblurring-and-frame" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/unifying-motion-deblurring-and-frame#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 
Backpropagation through Time and Space: Learning Numerical Methods with Multi-Agent Reinforcement Learning
no code implementations • 16 Mar 2022 • Elliot Way, Dheeraj S. K. Kapilavai, Yiwei Fu, Lei Yu

We introduce Backpropagation Through Time and Space (BPTTS), a method for training a recurrent spatio-temporal neural network that is used in a homogeneous multi-agent reinforcement learning (MARL) setting to learn numerical methods for hyperbolic conservation laws.

Tasks: Multi-agent Reinforcement Learning, reinforcement-learning (+2 more)
Paper: /paper/backpropagation-through-time-and-space (no code available)
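The "through time" half of BPTTS is ordinary backpropagation through an unrolled recurrent computation; the "and space" half shares the same cell across agent locations. The sketch below shows only the temporal unroll with a stand-in objective, as a generic illustration rather than the paper's environment.

```python
import torch
import torch.nn as nn

# Unroll a recurrent cell over T steps, accumulate a loss, and let autograd
# differentiate through the whole rollout (classic BPTT). Sizes are dummies.
cell = nn.GRUCell(input_size=8, hidden_size=16)
readout = nn.Linear(16, 1)
h = torch.zeros(4, 16)                      # 4 agents sharing one cell
loss = torch.zeros(())
for t in range(20):                         # unroll horizon T = 20
    obs = torch.randn(4, 8)                 # per-step observations (dummy)
    h = cell(obs, h)
    loss = loss + readout(h).pow(2).mean()  # stand-in objective
loss.backward()                             # gradients flow through all steps
```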
class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/enabling-arbitrary-translation-objectives-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2202.11444.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/enabling-arbitrary-translation-objectives-1">Enabling arbitrary translation objectives with Adaptive Tree Search</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/enabling-arbitrary-translation-objectives-1#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/iclr-2022-4"> ICLR 2022 </a> </span> • <span class="author-span "> <a href="/author/wang-ling">Wang Ling</a></span>, <span class="author-span "> <a href="/author/wojciech-stokowiec">Wojciech Stokowiec</a></span>, <span class="author-span "> <a href="/author/domenic-donato">Domenic Donato</a></span>, <span class="author-span "> <a href="/author/laurent-sartran">Laurent Sartran</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/austin-matthews">Austin Matthews</a></span>, <span class="author-span "> <a href="/author/chris-dyer">Chris Dyer</a></span> </p> <p class="item-strip-abstract">When applied to autoregressive models, our algorithm has different biases than beam search has, which enables a new analysis of the role of decoding bias in autoregressive models.</p> <div class="sota"> </div> <p> <a href="/task/decoder"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Decoder</span> </span> </a> <a href="/task/translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Translation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/enabling-arbitrary-translation-objectives-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/enabling-arbitrary-translation-objectives-1#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> 
</div> </div> <div class="row infinite-item item paper-card"> <!-- 1936514 --> <div class="col-lg-3 item-image-col"> <a href="/paper/deep-constrained-least-squares-for-blind"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/papergithubrepo/b6fef532-3797-47a1-81b3-7596f0c57772.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/deep-constrained-least-squares-for-blind">Deep Constrained Least Squares for Blind Image Super-Resolution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/deep-constrained-least-squares-for-blind#code">2 code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2022-1"> CVPR 2022 </a> </span> • <span class="author-span "> <a href="/author/ziwei-luo">Ziwei Luo</a></span>, <span class="author-span "> <a href="/author/haibin-huang">Haibin Huang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/youwei-li">Youwei Li</a></span>, <span class="author-span "> <a href="/author/haoqiang-fan">Haoqiang Fan</a></span>, <span class="author-span "> <a href="/author/shuaicheng-liu">Shuaicheng Liu</a></span> </p> <p class="item-strip-abstract">In this paper, we tackle the problem of blind image super-resolution(SR) with a reformulated degradation model and two novel modules.</p> <div class="sota"> <p> <a href="/sota/blind-super-resolution-on-div2krk-2x"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/blind-super-resolution-on-div2krk-2x-small_43ff6ac9.png"/> </a> Ranked #1 on <a class="sota-task" href="/sota/blind-super-resolution-on-div2krk-2x"> Blind Super-Resolution on DIV2KRK - 2x upscaling </a> </p> </div> <p> <a href="/task/blind-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Blind Super-Resolution</span> </span> </a> <a href="/task/deblurring"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000014-82fe8b62.jpg"> <span>Deblurring</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/deep-constrained-least-squares-for-blind#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 223</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deep-constrained-least-squares-for-blind" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 
1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deep-constrained-least-squares-for-blind#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1977160 --> <div class="col-lg-3 item-image-col"> <a href="/paper/synthetic-aperture-imaging-with-events-and"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1022034.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/synthetic-aperture-imaging-with-events-and">Synthetic Aperture Imaging With Events and Frames</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/synthetic-aperture-imaging-with-events-and#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2022-1"> CVPR 2022 </a> </span> • <span class="author-span "> <a href="/author/wei-liao">Wei Liao</a></span>, <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/shijie-lin">ShiJie Lin</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/ning-qiao">Ning Qiao</a></span> </p> <p class="item-strip-abstract">This paper addresses this problem by leveraging the merits of both events and frames, leading to a fusion-based SAI (EF-SAI) that performs consistently under the different densities of occlusions.</p> <div class="sota"> </div> <p> <a href="/task/decoder"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Decoder</span> </span> </a> <a href="/task/feature-selection"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>feature selection</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 10</span> </div> 
<div class="entity" style="margin-bottom: 20px;"> <a href="/paper/synthetic-aperture-imaging-with-events-and" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/synthetic-aperture-imaging-with-events-and#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1979581 --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-normalized-gaussian-wasserstein-distance"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2110.13389.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-normalized-gaussian-wasserstein-distance">A Normalized Gaussian Wasserstein Distance for Tiny Object Detection</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-normalized-gaussian-wasserstein-distance#code">3 code implementations</a> • <span class="author-name-text item-date-pub">26 Oct 2021</span> • <span class="author-span "> <a href="/author/jinwang-wang">Jinwang Wang</a></span>, <span class="author-span "> <a href="/author/chang-xu">Chang Xu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Our key observation is that Intersection over Union (IoU) based metrics such as IoU itself and its extensions are very sensitive to the location deviation of the tiny objects, and drastically deteriorate the detection performance when used in anchor-based detectors.</p> <div class="sota"> <p> <a href="/sota/object-detection-on-ai-tod"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/object-detection-on-ai-tod-small_c72e34d0.png"/> </a> Ranked #3 on <a href="/sota/object-detection-on-ai-tod"> Object Detection on AI-TOD </a> </p> </div> <p> <a href="/task/object"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> 
<span>Object</span> </span> </a> <a href="/task/object-detection-1"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>object-detection</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/a-normalized-gaussian-wasserstein-distance#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 209</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-normalized-gaussian-wasserstein-distance" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-normalized-gaussian-wasserstein-distance#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/drl-clusters-buffer-management-with"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/919174.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/drl-clusters-buffer-management-with">DRL-Clusters: Buffer Management with Clustering based Deep Reinforcement Learning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/drl-clusters-buffer-management-with#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/neurips-workshop-dbai-2021-12"> NeurIPS Workshop DBAI 2021 </a> </span> • <span class="author-span "> <a href="/author/kai-li">Kai Li</a></span>, <span class="author-span "> <a href="/author/qi-zhang">Qi 
Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/hong-min">Hong Min</a></span> </p> <p class="item-strip-abstract">Buffer cache has been widely implemented in database systems to reduce disk I/Os.</p> <div class="sota"> </div> <p> <a href="/task/clustering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000001594-3ce5d6d8.jpg"> <span>Clustering</span> </span> </a> <a href="/task/deep-reinforcement-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Deep Reinforcement Learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/drl-clusters-buffer-management-with#tasks"> <span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/drl-clusters-buffer-management-with" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/drl-clusters-buffer-management-with#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/motion-deblurring-with-real-events"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2109.13695.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/motion-deblurring-with-real-events">Motion Deblurring with Real Events</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/motion-deblurring-with-real-events#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/iccv-2021-1"> ICCV 2021 </a> </span> • <span class="author-span "> <a href="/author/fang-xu">Fang Xu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/bishan-wang">Bishan Wang</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span>, <span class="author-span "> <a href="/author/xu-jia">Xu Jia</a></span>, <span class="author-span "> <a href="/author/zhendong-qiao">Zhendong Qiao</a></span>, <span class="author-span "> <a href="/author/jianzhuang-liu">Jianzhuang Liu</a></span> </p> <p class="item-strip-abstract">In this paper, we propose an end-to-end learning framework for 
event-based motion deblurring in a self-supervised manner, where real-world events are exploited to alleviate the performance degradation caused by data inconsistency.</p> <div class="sota"> </div> <p> <a href="/task/deblurring"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000014-82fe8b62.jpg"> <span>Deblurring</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/motion-deblurring-with-real-events" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/motion-deblurring-with-real-events#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1827491 --> <div class="col-lg-3 item-image-col"> <a href="/paper/predicting-emergent-linguistic-compositions"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2109.04652.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/predicting-emergent-linguistic-compositions">Predicting emergent linguistic compositions through time: Syntactic frame extension via multimodal chaining</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/predicting-emergent-linguistic-compositions#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/emnlp-2021-11"> EMNLP 2021 </a> </span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/yang-xu">Yang Xu</a></span> </p> <p class="item-strip-abstract">Natural language relies on a finite lexicon to express an unbounded set of emerging ideas.</p> <div class="sota"> </div> <p> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 5</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/predicting-emergent-linguistic-compositions" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 
512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/predicting-emergent-linguistic-compositions#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1969595 --> <div class="col-lg-3 item-image-col"> <a href="/paper/ebsr-feature-enhanced-burst-super-resolution"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1013317.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/ebsr-feature-enhanced-burst-super-resolution">EBSR: Feature Enhanced Burst Super-Resolution With Deformable Alignment</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/ebsr-feature-enhanced-burst-super-resolution#code">2 code implementations</a> • <span class="item-conference-link"> <a href="/conference/proceedings-of-the-ieee-cvf-conference-on"> Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR) Workshops 2021 </a> </span> • <span class="author-span "> <a href="/author/ziwei-luo">Ziwei Luo</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/xuan-mo">Xuan Mo</a></span>, <span class="author-span "> <a href="/author/youwei-li">Youwei Li</a></span>, <span class="author-span "> <a href="/author/lanpeng-jia">Lanpeng Jia</a></span>, <span class="author-span "> <a href="/author/haoqiang-fan">Haoqiang Fan</a></span>, <span class="author-span "> <a href="/author/jian-sun">Jian Sun</a></span>, <span class="author-span "> <a href="/author/shuaicheng-liu">Shuaicheng Liu</a></span> </p> <p class="item-strip-abstract">We propose a novel architecture to handle the problem of multi-frame super-resolution (MFSR).</p> <div class="sota"> <p> <a href="/sota/burst-image-super-resolution-on"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/burst-image-super-resolution-on-small_c2bdc980.png"/> </a> Ranked #2 on <a href="/sota/burst-image-super-resolution-on"> Burst Image Super-Resolution on SyntheticBurst </a> </p> </div> <p> <a href="/task/burst-image-reconstruction"> <span class="badge 
badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Burst Image Reconstruction</span> </span> </a> <a href="/task/burst-image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Burst Image Super-Resolution</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/ebsr-feature-enhanced-burst-super-resolution#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 184</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/ebsr-feature-enhanced-burst-super-resolution" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/ebsr-feature-enhanced-burst-super-resolution#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/ntire-2021-challenge-on-burst-super"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2106.03839.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/ntire-2021-challenge-on-burst-super">NTIRE 2021 Challenge on Burst Super-Resolution: Methods and Results</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/ntire-2021-challenge-on-burst-super#code">no code implementations</a> • <span class="author-name-text item-date-pub">7 Jun 2021</span> • <span class="author-span "> <a href="/author/goutam-bhat">Goutam Bhat</a></span>, 
<span class="author-span "> <a href="/author/martin-danelljan">Martin Danelljan</a></span>, <span class="author-span "> <a href="/author/radu-timofte">Radu Timofte</a></span>, <span class="author-span "> <a href="/author/kazutoshi-akita">Kazutoshi Akita</a></span>, <span class="author-span "> <a href="/author/wooyeong-cho">Wooyeong Cho</a></span>, <span class="author-span "> <a href="/author/haoqiang-fan">Haoqiang Fan</a></span>, <span class="author-span "> <a href="/author/lanpeng-jia">Lanpeng Jia</a></span>, <span class="author-span "> <a href="/author/daeshik-kim">Daeshik Kim</a></span>, <span class="author-span "> <a href="/author/bruno-lecouat">Bruno Lecouat</a></span>, <span class="author-span "> <a href="/author/youwei-li">Youwei Li</a></span>, <span class="author-span "> <a href="/author/shuaicheng-liu">Shuaicheng Liu</a></span>, <span class="author-span "> <a href="/author/ziluan-liu">Ziluan Liu</a></span>, <span class="author-span "> <a href="/author/ziwei-luo">Ziwei Luo</a></span>, <span class="author-span "> <a href="/author/takahiro-maeda">Takahiro Maeda</a></span>, <span class="author-span "> <a href="/author/julien-mairal">Julien Mairal</a></span>, <span class="author-span "> <a href="/author/christian-micheloni">Christian Micheloni</a></span>, <span class="author-span "> <a href="/author/xuan-mo">Xuan Mo</a></span>, <span class="author-span "> <a href="/author/takeru-oba">Takeru Oba</a></span>, <span class="author-span "> <a href="/author/pavel-ostyakov">Pavel Ostyakov</a></span>, <span class="author-span "> <a href="/author/jean-ponce">Jean Ponce</a></span>, <span class="author-span "> <a href="/author/sanghyeok-son">Sanghyeok Son</a></span>, <span class="author-span "> <a href="/author/jian-sun">Jian Sun</a></span>, <span class="author-span "> <a href="/author/norimichi-ukita">Norimichi Ukita</a></span>, <span class="author-span "> <a href="/author/rao-muhammad-umer">Rao Muhammad Umer</a></span>, <span class="author-span "> <a href="/author/youliang-yan">Youliang Yan</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/magauiya-zhussip">Magauiya Zhussip</a></span>, <span class="author-span "> <a href="/author/xueyi-zou">Xueyi Zou</a></span> </p> <p class="item-strip-abstract">This paper reviews the NTIRE2021 challenge on burst super-resolution.</p> <div class="sota"> </div> <p> <a href="/task/super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000032-0f0cf3b2.jpg"> <span>Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/ntire-2021-challenge-on-burst-super" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/ntire-2021-challenge-on-burst-super#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg 
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/diverse-pretrained-context-encodings-improve"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2106.03717.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/diverse-pretrained-context-encodings-improve">Diverse Pretrained Context Encodings Improve Document Translation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/diverse-pretrained-context-encodings-improve#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/acl-2021-5"> ACL 2021 </a> </span> • <span class="author-span "> <a href="/author/domenic-donato">Domenic Donato</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/chris-dyer">Chris Dyer</a></span> </p> <p class="item-strip-abstract">We propose a new architecture for adapting a sentence-level sequence-to-sequence transformer by incorporating multiple pretrained document context signals and assess the impact on translation performance of (1) different pretraining approaches for generating these signals, (2) the quantity of parallel data for which document context is available, and (3) conditioning on source, target, or source and target contexts.</p> <div class="sota"> </div> <p> <a href="/task/document-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Document Translation</span> </span> </a> <a href="/task/sentence"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Sentence</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/diverse-pretrained-context-encodings-improve#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/diverse-pretrained-context-encodings-improve" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/diverse-pretrained-context-encodings-improve#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row 
infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/pretraining-the-noisy-channel-model-for-task"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2103.10518.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/pretraining-the-noisy-channel-model-for-task">Pretraining the Noisy Channel Model for Task-Oriented Dialogue</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/pretraining-the-noisy-channel-model-for-task#code">no code implementations</a> • <span class="author-name-text item-date-pub">18 Mar 2021</span> • <span class="author-span "> <a href="/author/qi-liu">Qi Liu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/laura-rimell">Laura Rimell</a></span>, <span class="author-span "> <a href="/author/phil-blunsom">Phil Blunsom</a></span> </p> <p class="item-strip-abstract">Direct decoding for task-oriented dialogue is known to suffer from the explaining-away effect, manifested in models that prefer short and generic responses.</p> <div class="sota"> <p> <a href="/sota/end-to-end-dialogue-modelling-on-multiwoz-2-0"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/end-to-end-dialogue-modelling-on-multiwoz-2-0-small_27de82a4.png"/> </a> Ranked #2 on <a href="/sota/end-to-end-dialogue-modelling-on-multiwoz-2-0"> End-To-End Dialogue Modelling on MULTIWOZ 2.0 </a> </p> </div> <p> <a href="/task/end-to-end-dialogue-modelling"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>End-To-End Dialogue Modelling</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/pretraining-the-noisy-channel-model-for-task" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/pretraining-the-noisy-channel-model-for-task#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1935189 --> <div class="col-lg-3 item-image-col"> <a href="/paper/event-based-synthetic-aperture-imaging"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2103.02376.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a 
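The noisy-channel factorization behind this line of work scores a candidate response y for a context x via Bayes' rule, p(y|x) ∝ p(x|y)·p(y), rather than with the direct model p(y|x), which is what penalizes generic responses that could follow any context. A minimal reranking sketch under assumed scorers; `channel_logp`, `lm_logp`, and the interpolation weights are placeholders, not the paper's parameterization.

```python
def noisy_channel_score(context, response, channel_logp, lm_logp,
                        lam_channel=1.0, lam_lm=0.5):
    """log p(y|x) up to a constant: lam1 * log p(x|y) + lam2 * log p(y).
    channel_logp(x, y) scores the context given the response (channel model);
    lm_logp(y) scores the response under an unconditional language model."""
    return lam_channel * channel_logp(context, response) + lam_lm * lm_logp(response)

def rerank(context, candidates, channel_logp, lm_logp):
    """Rerank direct-model candidates with the noisy-channel objective."""
    return max(candidates,
               key=lambda y: noisy_channel_score(context, y, channel_logp, lm_logp))

# Toy usage with stand-in scorers (real ones would be neural log-probabilities).
best = rerank("what time does it open?", ["at nine", "i am not sure"],
              channel_logp=lambda x, y: -abs(len(x) - 4 * len(y.split())),
              lm_logp=lambda y: -len(y))
print(best)
```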
href="/paper/event-based-synthetic-aperture-imaging">Event-based Synthetic Aperture Imaging with a Hybrid Network</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/event-based-synthetic-aperture-imaging#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2021-1"> CVPR 2021 </a> </span> • <span class="author-span "> <a href="/author/xiang-zhang">Xiang Zhang</a></span>, <span class="author-span "> <a href="/author/wei-liao">Wei Liao</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">Synthetic aperture imaging (SAI) is able to achieve the see through effect by blurring out the off-focus foreground occlusions and reconstructing the in-focus occluded targets from multi-view images.</p> <div class="sota"> </div> <p> <a href="/task/decoder"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Decoder</span> </span> </a> <a href="/task/style-transfer"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/aca0905b-6e4c-4347-b26f-db70ec527e5c.jpg"> <span>Style Transfer</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 29</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/event-based-synthetic-aperture-imaging" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/event-based-synthetic-aperture-imaging#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 
256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1852143 --> <div class="col-lg-3 item-image-col"> <a href="/paper/ulsd-unified-line-segment-detection-across"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/social-images/ModTQUZSuUAIPZHz.png');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/ulsd-unified-line-segment-detection-across">ULSD: Unified Line Segment Detection across Pinhole, Fisheye, and Spherical Cameras</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/ulsd-unified-line-segment-detection-across#code">2 code implementations</a> • <span class="author-name-text item-date-pub">6 Nov 2020</span> • <span class="author-span "> <a href="/author/hao-li">Hao Li</a></span>, <span class="author-span "> <a href="/author/huai-yu">Huai Yu</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/sebastian-scherer">Sebastian Scherer</a></span> </p> <p class="item-strip-abstract">Targeting at the unified line segment detection (ULSD) for both distorted and undistorted images, we propose to represent line segments with the Bezier curve model.</p> <div class="sota"> <p> <a href="/sota/line-segment-detection-on-wireframe-dataset"> <img style="height:20px;width:35px;position:relative;top:1px;" src="https://production-media.paperswithcode.com/sota-thumbs/line-segment-detection-on-wireframe-dataset-small_43cf31a6.png"/> </a> Ranked #5 on <a href="/sota/line-segment-detection-on-wireframe-dataset"> Line Segment Detection on wireframe dataset </a> (sAP10 metric) </p> </div> <p> <a href="/task/line-segment-detection"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Line Segment Detection</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 114</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/ulsd-unified-line-segment-detection-across" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/ulsd-unified-line-segment-detection-across#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 
Inferring symmetry in natural language
/paper/inferring-symmetry-in-natural-language • 1 code implementation • Findings of the Association for Computational Linguistics 2020 • ★ 0
Chelsea Tanchip, Lei Yu, Aotao Xu, Yang Xu
Our results show that a hybrid transfer learning model that integrates linguistic features with contextualized language models most faithfully predicts the empirical data.
Tasks: Sentence, Transfer Learning

Improving Spiking Sparse Recovery via Non-Convex Penalties
/paper/improving-spiking-sparse-recovery-via-non • no code implementations • 19 Sep 2020
Xiang Zhang, Lei Yu, Gang Zheng
Compared with digital methods, sparse recovery based on spiking neural networks has advantages such as high computational efficiency and low power consumption.
Tasks: Computational Efficiency
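As a reference point for what penalized sparse recovery means here: the standard iterative shrinkage approach solves min_x ½‖Ax − y‖² + λρ(x) by alternating a gradient step with a shrinkage step, and non-convex penalties (ℓp, MCP, and similar) replace the soft threshold with a less biased shrinkage. The sketch below is only the convex ℓ1/ISTA baseline; the paper's spiking-network formulation is a different, continuous-time realization.

```python
import numpy as np

def ista(A, y, lam=0.1, n_iter=200):
    """ISTA for min_x 0.5 * ||Ax - y||^2 + lam * ||x||_1.
    A non-convex penalty would swap the soft threshold for its own shrinkage."""
    L = np.linalg.norm(A, 2) ** 2              # Lipschitz constant of the gradient
    x = np.zeros(A.shape[1])
    for _ in range(n_iter):
        g = x - A.T @ (A @ x - y) / L          # gradient step
        x = np.sign(g) * np.maximum(np.abs(g) - lam / L, 0.0)  # soft threshold
    return x

# Toy problem: recover a 3-sparse vector from 50 random measurements.
rng = np.random.default_rng(0)
A = rng.normal(size=(50, 100))
x_true = np.zeros(100)
x_true[[3, 30, 77]] = (1.0, -2.0, 1.5)
x_hat = ista(A, A @ x_true, lam=0.05)
print(np.round(x_hat[[3, 30, 77]], 2))  # estimates at the true support
```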
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/mixed-noise-removal-with-pareto-prior"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2008.11935.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/mixed-noise-removal-with-pareto-prior">Mixed Noise Removal with Pareto Prior</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/mixed-noise-removal-with-pareto-prior#code">no code implementations</a> • <span class="author-name-text item-date-pub">27 Aug 2020</span> • <span class="author-span "> <a href="/author/zhou-liu">Zhou Liu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span>, <span class="author-span "> <a href="/author/hong-sun">Hong Sun</a></span> </p> <p class="item-strip-abstract">To address this problem, we exploit the Pareto distribution as the priori of the weighting matrix, based on which an accurate and robust weight estimator is proposed for mixed noise removal.</p> <div class="sota"> </div> <p> <a href="/task/denoising"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/6c4d53f8-9c6d-47c8-80c7-1b8e1c0a7d42.jpg"> <span>Denoising</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/mixed-noise-removal-with-pareto-prior" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/mixed-noise-removal-with-pareto-prior#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/structure-aware-network-for-lane-marker"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2008.06204.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/structure-aware-network-for-lane-marker">Structure-Aware Network for Lane Marker Extraction with Dynamic Vision Sensor</a></h1> <p class="author-section" 
style="padding-top:2px"> <a href="/paper/structure-aware-network-for-lane-marker#code">no code implementations</a> • <span class="author-name-text item-date-pub">14 Aug 2020</span> • <span class="author-span "> <a href="/author/wensheng-cheng">Wensheng Cheng</a></span>, <span class="author-span "> <a href="/author/hao-luo">Hao Luo</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/wei-li">Wei Li</a></span> </p> <p class="item-strip-abstract">We then propose a structure-aware network for lane marker extraction in DVS images.</p> <div class="sota"> </div> <p> <a href="/task/autonomous-driving"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000363-06d10c79.jpg"> <span>Autonomous Driving</span> </span> </a> <a href="/task/semantic-segmentation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/0d834282-fd21-4e57-be69-d5c2ed538690.jpg"> <span>Semantic Segmentation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/structure-aware-network-for-lane-marker" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/structure-aware-network-for-lane-marker#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1606675 --> <div class="col-lg-3 item-image-col"> <a href="/paper/event-enhanced-high-quality-image-recovery"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2007.08336.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/event-enhanced-high-quality-image-recovery">Event Enhanced High-Quality Image Recovery</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/event-enhanced-high-quality-image-recovery#code">1 code implementation</a> • <span class="item-conference-link"> <a href="/conference/eccv-2020-8"> ECCV 2020 </a> </span> • <span class="author-span "> <a href="/author/bishan-wang">Bishan Wang</a></span>, <span class="author-span "> <a href="/author/jingwei-he">Jingwei He</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span> </p> <p 
class="item-strip-abstract">To recover high-quality intensity images, one should address both denoising and super-resolution problems for event cameras.</p> <div class="sota"> </div> <p> <a href="/task/denoising"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/6c4d53f8-9c6d-47c8-80c7-1b8e1c0a7d42.jpg"> <span>Denoising</span> </span> </a> <a href="/task/sparse-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Sparse Learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/event-enhanced-high-quality-image-recovery#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 60</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/event-enhanced-high-quality-image-recovery" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/event-enhanced-high-quality-image-recovery#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/implicit-euler-ode-networks-for-single-image"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2007.06443.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/implicit-euler-ode-networks-for-single-image">Implicit Euler ODE Networks for Single-Image Dehazing</a></h1> <p class="author-section" style="padding-top:2px"> <a 
href="/paper/implicit-euler-ode-networks-for-single-image#code">no code implementations</a> • <span class="author-name-text item-date-pub">13 Jul 2020</span> • <span class="author-span "> <a href="/author/jiawei-shen">Jiawei Shen</a></span>, <span class="author-span "> <a href="/author/zhuoyan-li">Zhuoyan Li</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span> </p> <p class="item-strip-abstract">Deep convolutional neural networks (CNN) have been applied for image dehazing tasks, where the residual network (ResNet) is often adopted as the basic component to avoid the vanishing gradient problem.</p> <div class="sota"> </div> <p> <a href="/task/image-dehazing"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Dehazing</span> </span> </a> <a href="/task/single-image-dehazing"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Single Image Dehazing</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/implicit-euler-ode-networks-for-single-image" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/implicit-euler-ode-networks-for-single-image#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- 1804889 --> <div class="col-lg-3 item-image-col"> <a href="/paper/kernel-learning-for-high-resolution-time"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2007.00322.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/kernel-learning-for-high-resolution-time">Kernel Learning for High-Resolution Time-Frequency Distribution</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/kernel-learning-for-high-resolution-time#code">1 code implementation</a> • <span class="author-name-text item-date-pub">1 Jul 2020</span> • <span class="author-span "> <a href="/author/lei-jiang">Lei Jiang</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/guang-hua">Guang Hua</a></span> </p> <p class="item-strip-abstract">To break the current 

Kernel Learning for High-Resolution Time-Frequency Distribution
1 code implementation • 1 Jul 2020 • Lei Jiang, Haijian Zhang, Lei Yu, Guang Hua
To break the current limitation, we propose a data-driven kernel learning model built directly on the Wigner-Ville distribution (WVD).
Stars: 1
Paper: /paper/kernel-learning-for-high-resolution-time • Code: /paper/kernel-learning-for-high-resolution-time#code
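
For context, the Wigner-Ville distribution of a signal $x$ is (standard definition, not quoted from the paper):

$$W_x(t, f) = \int_{-\infty}^{\infty} x\!\left(t + \tfrac{\tau}{2}\right) x^{*}\!\left(t - \tfrac{\tau}{2}\right) e^{-j 2\pi f \tau}\, d\tau .$$

It offers the highest time-frequency resolution but suffers from cross-term interference on multi-component signals; hand-designed smoothing kernels suppress the cross-terms at the cost of resolution, which is the trade-off a learned, data-driven kernel aims to improve.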

IsarStep: a Benchmark for High-level Mathematical Reasoning
2 code implementations • ICLR 2021 • Wenda Li, Lei Yu, Yuhuai Wu, Lawrence C. Paulson
In this paper, we present a benchmark for high-level mathematical reasoning and study the reasoning capabilities of neural sequence-to-sequence models.
Tasks: Mathematical Proofs, Mathematical Reasoning (+1 more)
Stars: 9
Paper: /paper/modelling-high-level-mathematical-reasoning • Code: /paper/modelling-high-level-mathematical-reasoning#code
href="/paper/robust-time-frequency-reconstruction-by">Robust Time-Frequency Reconstruction by Learning Structured Sparsity</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/robust-time-frequency-reconstruction-by#code">no code implementations</a> • <span class="author-name-text item-date-pub">30 Apr 2020</span> • <span class="author-span "> <a href="/author/lei-jiang">Lei Jiang</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Time-frequency distributions (TFDs) play a vital role in providing descriptive analysis of non-stationary signals involved in realistic scenarios.</p> <div class="sota"> </div> <p> <a href="/task/descriptive"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Descriptive</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/robust-time-frequency-reconstruction-by" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/robust-time-frequency-reconstruction-by#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-a-simple-and-effective-model-for"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2004.01972.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-a-simple-and-effective-model-for">Learning a Simple and Effective Model for Multi-turn Response Generation with Auxiliary Tasks</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/learning-a-simple-and-effective-model-for#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/emnlp-2020-11"> EMNLP 2020 </a> </span> • <span class="author-span "> <a href="/author/yufan-zhao">Yufan Zhao</a></span>, <span class="author-span "> <a href="/author/can-xu">Can Xu</a></span>, <span class="author-span "> <a href="/author/wei-wu">Wei Wu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">We study multi-turn response generation for open-domain dialogues.</p> <div class="sota"> </div> <p> <a href="/task/response-generation"> <span class="badge badge-primary"> <img 
src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Response Generation</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-a-simple-and-effective-model-for" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-a-simple-and-effective-model-for#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/capturing-document-context-inside-sentence"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2003.05259.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/capturing-document-context-inside-sentence">Capturing document context inside sentence-level neural machine translation models with self-training</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/capturing-document-context-inside-sentence#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/codi-2021-11"> CODI 2021 </a> </span> • <span class="author-span "> <a href="/author/elman-mansimov">Elman Mansimov</a></span>, <span class="author-span "> <a href="/author/gabor-melis-1">Gábor Melis</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Neural machine translation (NMT) has arguably achieved human level parity when trained and evaluated at the sentence-level.</p> <div class="sota"> </div> <p> <a href="/task/machine-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000257-2b560008_M7RFnV9.jpg"> <span>Machine Translation</span> </span> </a> <a href="/task/nmt"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>NMT</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/capturing-document-context-inside-sentence#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/capturing-document-context-inside-sentence" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg 
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/capturing-document-context-inside-sentence#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/matching-neuromorphic-events-and-color-images"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2003.00636.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/matching-neuromorphic-events-and-color-images">Matching Neuromorphic Events and Color Images via Adversarial Learning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/matching-neuromorphic-events-and-color-images#code">no code implementations</a> • <span class="author-name-text item-date-pub">2 Mar 2020</span> • <span class="author-span "> <a href="/author/fang-xu">Fang Xu</a></span>, <span class="author-span "> <a href="/author/shijie-lin">ShiJie Lin</a></span>, <span class="author-span "> <a href="/author/wen-yang">Wen Yang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/dengxin-dai">Dengxin Dai</a></span>, <span class="author-span "> <a href="/author/gui-song-xia">Gui-Song Xia</a></span> </p> <p class="item-strip-abstract">The event camera has appealing properties: high dynamic range, low latency, low power consumption and low memory usage, and thus provides complementariness to conventional frame-based cameras.</p> <div class="sota"> </div> <p> <a href="/task/image-retrieval"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/834263fd-0f2e-47a9-bda1-0fd3f44c71df.jpg"> <span>Image Retrieval</span> </span> </a> <a href="/task/retrieval"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/8576b666-5d7a-4b88-a1e2-5dcc3ea02f16.jpg"> <span>Retrieval</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/matching-neuromorphic-events-and-color-images" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a 
href="/paper/matching-neuromorphic-events-and-color-images#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/edge-isoperimetric-inequalities-and-ball"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/2002.03296.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/edge-isoperimetric-inequalities-and-ball">Edge-Isoperimetric Inequalities and Ball-Noise Stability: Linear Programming and Probabilistic Approaches</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/edge-isoperimetric-inequalities-and-ball#code">no code implementations</a> • <span class="author-name-text item-date-pub">9 Feb 2020</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Our bounds derived by the second approach are expressed in terms of the \emph{noise stability}, and they are shown to be asymptotically tight as $n\to\infty$ when $r=2\lfloor\frac{\beta n}{2}\rfloor+1$ and $M=\lfloor\alpha2^{n}\rfloor$ for fixed $\alpha,\beta\in(0, 1)$, and is tight up to a factor $2$ when $r=2\lfloor\frac{\beta n}{2}\rfloor$ and $M=\lfloor\alpha2^{n}\rfloor$.</p> <div class="sota"> </div> <p> <span class="badge badge-primary badge-primary-nohover">Combinatorics</span> <span class="badge badge-primary badge-primary-nohover">Information Theory</span> <span class="badge badge-primary badge-primary-nohover">Information Theory</span> <span class="badge badge-primary badge-primary-nohover">Probability</span> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/edge-isoperimetric-inequalities-and-ball" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/edge-isoperimetric-inequalities-and-ball#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/effects-of-differential-privacy-and-data"> <div class="item-image" style="background-image: 

Effects of Differential Privacy and Data Skewness on Membership Inference Vulnerability
no code implementations • 21 Nov 2019 • Stacey Truex, Ling Liu, Mehmet Emre Gursoy, Wenqi Wei, Lei Yu
Second, through MPLens, we highlight how the vulnerability of pre-trained models under membership inference attack is not uniform across all classes, particularly when the training data itself is skewed.
Tasks: Inference Attack, Membership Inference Attack
Paper: /paper/effects-of-differential-privacy-and-data
class="author-name-text item-date-pub">3 Nov 2019</span> • <span class="author-span "> <a href="/author/andrew-cohen">Andrew Cohen</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/xingye-qiao">Xingye Qiao</a></span>, <span class="author-span "> <a href="/author/xiangrong-tong">Xiangrong Tong</a></span> </p> <p class="item-strip-abstract">A theoretical investigation shows that the set of policies learned by MEDE capture the same modalities as the optimal maximum entropy policy.</p> <div class="sota"> </div> <p> <a href="/task/diversity"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Diversity</span> </span> </a> <a href="/task/reinforcement-learning-2"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>reinforcement-learning</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/maximum-entropy-diverse-exploration#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/maximum-entropy-diverse-exploration" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/maximum-entropy-diverse-exploration#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/a-mutual-information-maximization-perspective-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1910.08350.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/a-mutual-information-maximization-perspective-1">A Mutual Information Maximization Perspective of Language Representation Learning</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/a-mutual-information-maximization-perspective-1#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/iclr-2020-1"> ICLR 2020 </a> </span> • <span class="author-span "> <a href="/author/lingpeng-kong">Lingpeng Kong</a></span>, <span class="author-span "> <a href="/author/cyprien-de-masson-d-autume">Cyprien de Masson d'Autume</a></span>, <span class="author-span "> <a href="/author/wang-ling">Wang Ling</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a 
href="/author/zihang-dai">Zihang Dai</a></span>, <span class="author-span "> <a href="/author/dani-yogatama">Dani Yogatama</a></span> </p> <p class="item-strip-abstract">We show state-of-the-art word representation learning methods maximize an objective function that is a lower bound on the mutual information between different parts of a word sequence (i. e., a sentence).</p> <div class="sota"> </div> <p> <a href="/task/representation-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000228-3131cfbf_nx72Tly.jpg"> <span>Representation Learning</span> </span> </a> <a href="/task/sentence"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Sentence</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/a-mutual-information-maximization-perspective-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/a-mutual-information-maximization-perspective-1#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/learning-cluster-structured-sparsity-by-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1910.05303.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/learning-cluster-structured-sparsity-by-1">Learning Cluster Structured Sparsity by Reweighting</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/learning-cluster-structured-sparsity-by-1#code">no code implementations</a> • <span class="author-name-text item-date-pub">11 Oct 2019</span> • <span class="author-span "> <a href="/author/yulun-jiang">Yulun Jiang</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/haijian-zhang">Haijian Zhang</a></span>, <span class="author-span "> <a href="/author/zhou-liu">Zhou Liu</a></span> </p> <p class="item-strip-abstract">Recently, the paradigm of unfolding iterative algorithms into finite-length feed-forward neural networks has achieved a great success in the area of sparse recovery.</p> <div class="sota"> </div> <p> <a href="/task/sparse-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Sparse Learning</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact 
text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/learning-cluster-structured-sparsity-by-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/learning-cluster-structured-sparsity-by-1#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/putting-machine-translation-in-context-with-1"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1910.00553.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/putting-machine-translation-in-context-with-1">Better Document-Level Machine Translation with Bayes' Rule</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/putting-machine-translation-in-context-with-1#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/tacl-2020-1"> TACL 2020 </a> </span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/laurent-sartran">Laurent Sartran</a></span>, <span class="author-span "> <a href="/author/wojciech-stokowiec">Wojciech Stokowiec</a></span>, <span class="author-span "> <a href="/author/wang-ling">Wang Ling</a></span>, <span class="author-span "> <a href="/author/lingpeng-kong">Lingpeng Kong</a></span>, <span class="author-span "> <a href="/author/phil-blunsom">Phil Blunsom</a></span>, <span class="author-span "> <a href="/author/chris-dyer">Chris Dyer</a></span> </p> <p class="item-strip-abstract">We show that Bayes' rule provides an effective mechanism for creating document translation models that can be learned from only parallel sentences and monolingual documents---a compelling benefit as parallel documents are not always available.</p> <div class="sota"> </div> <p> <a href="/task/document-level-machine-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Document Level Machine Translation</span> </span> </a> <a href="/task/document-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Document Translation</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/putting-machine-translation-in-context-with-1#tasks"> <span class="badge badge-primary"> <b>+4</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" 
style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/putting-machine-translation-in-context-with-1" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/putting-machine-translation-in-context-with-1#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/relative-pixel-prediction-for-autoregressive"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/917365.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/relative-pixel-prediction-for-autoregressive">Relative Pixel Prediction For Autoregressive Image Generation</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/relative-pixel-prediction-for-autoregressive#code">no code implementations</a> • <span class="author-name-text item-date-pub">25 Sep 2019</span> • <span class="author-span "> <a href="/author/wang-ling">Wang Ling</a></span>, <span class="author-span "> <a href="/author/chris-dyer">Chris Dyer</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/lingpeng-kong">Lingpeng Kong</a></span>, <span class="author-span "> <a href="/author/dani-yogatama">Dani Yogatama</a></span>, <span class="author-span "> <a href="/author/susannah-young">Susannah Young</a></span> </p> <p class="item-strip-abstract">In natural images, transitions between adjacent pixels tend to be smooth and gradual, a fact that has long been exploited in image compression models based on predictive coding.</p> <div class="sota"> </div> <p> <a href="/task/colorization"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/9431d1fa-ade7-4849-ae77-746cec16d3c2.jpg"> <span>Colorization</span> </span> </a> <a href="/task/image-colorization"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Image Colorization</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/relative-pixel-prediction-for-autoregressive#tasks"> <span class="badge badge-primary"> <b>+4</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/relative-pixel-prediction-for-autoregressive" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg 
xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/relative-pixel-prediction-for-autoregressive#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/putting-machine-translation-in-context-with"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/917267.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/putting-machine-translation-in-context-with">Putting Machine Translation in Context with the Noisy Channel Model</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/putting-machine-translation-in-context-with#code">no code implementations</a> • <span class="author-name-text item-date-pub">25 Sep 2019</span> • <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/laurent-sartran">Laurent Sartran</a></span>, <span class="author-span "> <a href="/author/wojciech-stokowiec">Wojciech Stokowiec</a></span>, <span class="author-span "> <a href="/author/wang-ling">Wang Ling</a></span>, <span class="author-span "> <a href="/author/lingpeng-kong">Lingpeng Kong</a></span>, <span class="author-span "> <a href="/author/phil-blunsom">Phil Blunsom</a></span>, <span class="author-span "> <a href="/author/chris-dyer">Chris Dyer</a></span> </p> <p class="item-strip-abstract">We show that Bayes' rule provides a compelling mechanism for controlling unconditional document language models, using the long-standing challenge of effectively leveraging document context in machine translation.</p> <div class="sota"> </div> <p> <a href="/task/document-translation"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Document Translation</span> </span> </a> <a href="/task/language-modelling"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000267-8df06634.jpg"> <span>Language Modelling</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/putting-machine-translation-in-context-with#tasks"> <span class="badge badge-primary"> <b>+3</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/putting-machine-translation-in-context-with" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 

Putting Machine Translation in Context with the Noisy Channel Model
no code implementations • 25 Sep 2019 • Lei Yu, Laurent Sartran, Wojciech Stokowiec, Wang Ling, Lingpeng Kong, Phil Blunsom, Chris Dyer
We show that Bayes' rule provides a compelling mechanism for controlling unconditional document language models, using the long-standing challenge of effectively leveraging document context in machine translation.
Tasks: Document Translation, Language Modelling (+3 more)
Paper: /paper/putting-machine-translation-in-context-with

Demystifying Learning Rate Policies for High Accuracy Training of Deep Neural Networks
1 code implementation • 18 Aug 2019 • Yanzhao Wu, Ling Liu, Juhyun Bae, Ka-Ho Chow, Arun Iyengar, Calton Pu, Wenqi Wei, Lei Yu, Qi Zhang
The learning rate (LR) is an important hyperparameter to tune for effective training of deep neural networks (DNNs).
Tasks: Benchmarking, Image Classification
Stars: 19
Paper: /paper/demystifying-learning-rate-polices-for-high • Code: /paper/demystifying-learning-rate-polices-for-high#code
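
As a concrete instance of the kind of LR policy such a study compares, here is a minimal sketch of a triangular cyclic schedule (an illustrative example, not the paper's recommended policy; the function name and constants are made up):

```python
def triangular_lr(step, base_lr=1e-4, max_lr=1e-2, cycle_len=2000):
    """Triangular cyclic learning-rate policy: the LR ramps linearly from
    base_lr up to max_lr and back down once per cycle of cycle_len steps."""
    pos = (step % cycle_len) / cycle_len   # position within the current cycle
    tri = 1.0 - abs(2.0 * pos - 1.0)       # rises 0 -> 1, then falls 1 -> 0
    return base_lr + (max_lr - base_lr) * tri
```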
height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/demystifying-learning-rate-polices-for-high#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/crnet-image-super-resolution-using-a"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/1908.01166.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/crnet-image-super-resolution-using-a">CRNet: Image Super-Resolution Using A Convolutional Sparse Coding Inspired Network</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/crnet-image-super-resolution-using-a#code">no code implementations</a> • <span class="author-name-text item-date-pub">3 Aug 2019</span> • <span class="author-span "> <a href="/author/menglei-zhang">Menglei Zhang</a></span>, <span class="author-span "> <a href="/author/zhou-liu">Zhou Liu</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span> </p> <p class="item-strip-abstract">Convolutional Sparse Coding (CSC) has been attracting more and more attention in recent years, for making full use of image global correlation to improve performance on various computer vision applications.</p> <div class="sota"> </div> <p> <a href="/task/image-super-resolution"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/thumbnails/task/87198f6e-219c-4651-ba73-c17f794d9a7e.jpg"> <span>Image Super-Resolution</span> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/crnet-image-super-resolution-using-a" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 
CRNet: Image Super-Resolution Using A Convolutional Sparse Coding Inspired Network
no code implementations • 3 Aug 2019 • Menglei Zhang, Zhou Liu, Lei Yu
Convolutional Sparse Coding (CSC) has attracted growing attention in recent years because it exploits global image correlation to improve performance on various computer vision applications.
Tasks: Image Super-Resolution
Paper | Add Code

Deep learning scheme for recovery of broadband microwave photonic receiving systems in transceivers without expert knowledge and system priors
no code implementations • 17 Jul 2019 • Shaofu Xu, Rui Wang, Jianping Chen, Lei Yu, Weiwen Zou
The quality of the signals may be degraded by defective photonic analog links, especially in a complicated MWP system.
Paper | Add Code

Facial Feature Embedded CycleGAN for VIS-NIR Translation
no code implementations • 20 Apr 2019 • Huijiao Wang, Li Wang, Xulei Yang, Lei Yu, Haijian Zhang
VIS-NIR face recognition remains challenging due to the spectral gap between the two modalities and the scarcity of paired training data.
Tasks: Face Recognition, Translation
Paper | Add Code

Intra-Ensemble in Neural Networks
no code implementations • 9 Apr 2019 • Yuan Gao, Zixiang Cai, Lei Yu
In this work, we propose Intra-Ensemble, an end-to-end ensemble strategy with stochastic channel recombination operations to train several sub-networks simultaneously within one neural network.
Tasks: Diversity
Paper | Add Code
Unsupervised Recurrent Neural Network Grammars
1 code implementation • NAACL 2019 • Yoon Kim, Alexander M. Rush, Lei Yu, Adhiguna Kuncoro, Chris Dyer, Gábor Melis
On language modeling, unsupervised RNNGs perform as well as their supervised counterparts on benchmarks in English and Chinese.
Ranked #11 on Constituency Grammar Induction on PTB (Penn Treebank) (Max F1 (WSJ) metric)
Tasks: Constituency Grammar Induction, Language Modelling, +2
★ 176 • Paper | Code

Differentially Private Model Publishing for Deep Learning
no code implementations • 3 Apr 2019 • Lei Yu, Ling Liu, Calton Pu, Mehmet Emre Gursoy, Stacey Truex
When training datasets are crowdsourced from individuals and contain sensitive information, the model parameters may encode private information and carry risks of privacy leakage.
Tasks: Deep Learning
Paper | Add Code
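Differentially private model training of the kind studied above is commonly realised with per-example gradient clipping plus calibrated Gaussian noise (the generic DP-SGD recipe). The NumPy sketch below shows only that generic mechanism; the function, its hyper-parameters, and the omitted privacy accounting are illustrative assumptions, not the paper's algorithm:

```python
import numpy as np

def dp_sgd_step(params, per_example_grads, lr=0.1, clip_norm=1.0, noise_mult=1.1):
    # Clip each example's gradient to L2 norm <= clip_norm.
    clipped = [g * min(1.0, clip_norm / (np.linalg.norm(g) + 1e-12))
               for g in per_example_grads]
    mean_grad = np.mean(clipped, axis=0)
    # Gaussian noise calibrated to the clipping bound: std scales with the
    # noise multiplier and shrinks with batch size, as in generic DP-SGD.
    std = noise_mult * clip_norm / len(per_example_grads)
    noisy_grad = mean_grad + np.random.normal(0.0, std, size=mean_grad.shape)
    return params - lr * noisy_grad
```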
Diverse Exploration via Conjugate Policies for Policy Gradient Methods
no code implementations • 10 Feb 2019 • Andrew Cohen, Xingye Qiao, Lei Yu, Elliot Way, Xiangrong Tong
We address the challenge of effective exploration while maintaining good performance in policy gradient methods.
Tasks: Policy Gradient Methods
Paper | Add Code

Learning and Evaluating General Linguistic Intelligence
no code implementations • 31 Jan 2019 • Dani Yogatama, Cyprien de Masson d'Autume, Jerome Connor, Tomas Kocisky, Mike Chrzanowski, Lingpeng Kong, Angeliki Lazaridou, Wang Ling, Lei Yu, Chris Dyer, Phil Blunsom
We define general linguistic intelligence as the ability to reuse previously acquired knowledge about a language's lexicon, syntax, semantics, and pragmatic conventions to adapt to new tasks quickly.
Tasks: Natural Language Understanding, Question Answering
Paper | Add Code
Variational Smoothing in Recurrent Neural Network Language Models
no code implementations • ICLR 2019 • Lingpeng Kong, Gabor Melis, Wang Ling, Lei Yu, Dani Yogatama
We present a new theoretical perspective on data noising in recurrent neural network language models (Xie et al., 2017).
Tasks: Language Modelling
Paper | Add Code

Image Super-Resolution via RL-CSC: When Residual Learning Meets Convolutional Sparse Coding
no code implementations • 31 Dec 2018 • Menglei Zhang, Zhou Liu, Lei Yu
We extend LISTA to its convolutional version and build the main part of our model by strictly following the convolutional form, which improves the network's interpretability.
Ranked #38 on Image Super-Resolution on BSD100 - 4x upscaling
Tasks: Image Super-Resolution
Paper | Add Code
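RL-CSC (like CRNet above) unfolds LISTA, the learned variant of the iterative shrinkage-thresholding algorithm, into network layers. A dense toy sketch of the recurrence being unfolded — in the paper the operators are convolutional and learned end to end, whereas here `W_e`, `S`, and `theta` are plain fixed matrices for illustration:

```python
import numpy as np

def soft_threshold(x, theta):
    # Proximal operator of the L1 norm (the "shrinkage" step).
    return np.sign(x) * np.maximum(np.abs(x) - theta, 0.0)

def lista(y, W_e, S, theta, n_layers=5):
    # Unrolled recurrence: z_{k+1} = soft_threshold(W_e @ y + S @ z_k, theta).
    # In LISTA, W_e, S, and theta are trained; each loop iteration
    # corresponds to one network layer.
    z = soft_threshold(W_e @ y, theta)
    for _ in range(n_layers - 1):
        z = soft_threshold(W_e @ y + S @ z, theta)
    return z
```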
Sentence Encoding with Tree-constrained Relation Networks
no code implementations • 26 Nov 2018 • Lei Yu, Cyprien de Masson d'Autume, Chris Dyer, Phil Blunsom, Lingpeng Kong, Wang Ling
The meaning of a sentence is a function of the relations that hold between its words.
Tasks: General Classification, Machine Translation, +5
Paper | Add Code

Tackling Sequence to Sequence Mapping Problems with Neural Networks
no code implementations • 25 Oct 2018 • Lei Yu
In Natural Language Processing (NLP), it is important to detect the relationship between two sequences or to generate a sequence of tokens given another observed sequence.
Tasks: Domain Adaptation, Feature Engineering, +2
Paper | Add Code

Adversarial Examples in Deep Learning: Characterization and Divergence
no code implementations • 29 Jun 2018 • Wenqi Wei, Ling Liu, Margaret Loper, Stacey Truex, Lei Yu, Mehmet Emre Gursoy, Yanzhao Wu
The burgeoning success of deep learning has raised security and privacy concerns as more and more tasks involve sensitive data.
Tasks: Adversarial Attack, Deep Learning
Paper | Add Code

Towards Demystifying Membership Inference Attacks
1 code implementation • 28 Jun 2018 • Stacey Truex, Ling Liu, Mehmet Emre Gursoy, Lei Yu, Wenqi Wei
Our empirical results additionally show that (1) using the type of target model under attack within the attack model may not increase attack effectiveness, and (2) collaborative learning in federated systems exposes vulnerabilities to membership inference risks when the adversary is a participant in the federation.
Category: Cryptography and Security
★ 47 • Paper | Code
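Membership inference attacks of the kind analysed above are often built by fitting an attack classifier on the confidence vectors of shadow models. A minimal sketch of that common recipe — scikit-learn, the function name, and the interface are assumptions for illustration, not necessarily the paper's exact pipeline:

```python
import numpy as np
from sklearn.linear_model import LogisticRegression

def shadow_model_attack(shadow_conf_in, shadow_conf_out, target_conf):
    # Label shadow-model confidence vectors: 1 = seen in training (member),
    # 0 = held out (non-member), then fit a binary attack classifier.
    X = np.vstack([shadow_conf_in, shadow_conf_out])
    y = np.concatenate([np.ones(len(shadow_conf_in)),
                        np.zeros(len(shadow_conf_out))])
    attack = LogisticRegression(max_iter=1000).fit(X, y)
    # Score records queried against the target model: estimated P(member).
    return attack.predict_proba(target_conf)[:, 1]
```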
Diverse Exploration for Fast and Safe Policy Improvement
no code implementations • 22 Feb 2018 • Andrew Cohen, Lei Yu, Robert Wright
We study an important yet under-addressed problem of quickly and safely improving policies in online reinforcement learning domains.
Tasks: Diversity, Reinforcement Learning, +2
Paper | Add Code

Automatic Streaming Segmentation of Stereo Video Using Bilateral Space
no code implementations • 10 Oct 2017 • Wenjing Ke, Yuanjie Zhu, Lei Yu
In this paper, we take advantage of a binocular camera and propose an unsupervised algorithm that builds on a semi-supervised segmentation method to extract the foreground efficiently.
Paper | Add Code

The Neural Noisy Channel
no code implementations • 8 Nov 2016 • Lei Yu, Phil Blunsom, Chris Dyer, Edward Grefenstette, Tomas Kocisky
We formulate sequence to sequence transduction as a noisy channel decoding problem and use recurrent neural networks to parameterise the source and channel models.
Tasks: Decoder, Machine Translation, +3
Paper | Add Code
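In the noisy channel formulation above, an output y for an input x is scored by Bayes' rule, p(y | x) ∝ p(x | y) · p(y), combining a channel model with a source (language) model. A minimal reranking sketch under that decomposition — the interpolation weight and the two caller-supplied scoring functions are assumptions, not the paper's decoder:

```python
def noisy_channel_rerank(x, candidates, channel_logprob, lm_logprob, lam=0.5):
    # Score each candidate output y by a weighted noisy-channel objective:
    #   lam * log p(x | y)  +  (1 - lam) * log p(y)
    # channel_logprob(x, y) and lm_logprob(y) are assumed caller-supplied.
    def score(y):
        return lam * channel_logprob(x, y) + (1.0 - lam) * lm_logprob(y)
    return max(candidates, key=score)
```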
Online Segment to Segment Neural Transduction
no code implementations • EMNLP 2016 • Lei Yu, Jan Buys, Phil Blunsom
We introduce an online neural sequence to sequence model that learns to alternate between encoding and decoding segments of the input as it is read.
Tasks: Morphological Inflection, Sentence, +1
Paper | Add Code

GPU-FV: Realtime Fisher Vector and Its Applications in Video Monitoring
1 code implementation • 12 Apr 2016 • Wenying Ma, Liangliang Cao, Lei Yu, Guoping Long, Yucheng Li
We also applied GPU-FV to realtime video monitoring tasks and found that GPU-FV outperforms a number of previous works.
Tasks: Retrieval
★ 0 • Paper | Code
Neural Variational Inference for Text Processing
6 code implementations • 19 Nov 2015 • Yishu Miao, Lei Yu, Phil Blunsom
We validate this framework on two very different text modelling applications, generative document modelling and supervised question answering.
Ranked #1 on Question Answering on QASent
Tasks: Answer Selection, Topic Models, +1
★ 540 • Paper | Code
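Neural variational frameworks of this kind are typically trained with a Gaussian latent representation via the reparameterisation trick, regularised by a closed-form KL term in the ELBO. A NumPy sketch of just those two ingredients — shapes and names are illustrative, not the paper's model:

```python
import numpy as np

def gaussian_kl(mu, logvar):
    # Closed-form KL( N(mu, exp(logvar)) || N(0, I) ), the ELBO regulariser.
    return 0.5 * np.sum(np.exp(logvar) + mu**2 - 1.0 - logvar)

def sample_latent(mu, logvar, rng=None):
    # Reparameterisation trick: z = mu + sigma * eps with eps ~ N(0, I),
    # which keeps the sample differentiable in (mu, logvar).
    rng = rng or np.random.default_rng()
    eps = rng.standard_normal(mu.shape)
    return mu + np.exp(0.5 * logvar) * eps
```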
The SYSU System for the Interspeech 2015 Automatic Speaker Verification Spoofing and Countermeasures Challenge
no code implementations • 24 Jul 2015 • Shitao Weng, Shushan Chen, Lei Yu, Xuewei Wu, Weicheng Cai, Zhi Liu, Ming Li
In order to detect these spoofed speech signals as a countermeasure, we propose a score level fusion approach with several different i-vector subsystems.
Tasks: Speaker Verification, Speech Synthesis (+1 more)
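Score-level fusion of the kind proposed here combines the per-trial scores of several subsystems into a single decision score. A generic linear-fusion sketch follows; the z-normalisation and the fixed weights are assumptions of this illustration (in practice fusion weights are usually trained on a development set), not details taken from the paper.

import numpy as np

def fuse_scores(subsystem_scores, weights):
    # subsystem_scores: (S, T) raw scores from S subsystems on T trials
    # weights: (S,) fusion weights, assumed given
    s = np.asarray(subsystem_scores, dtype=float)
    # z-normalise each subsystem so its scores live on a comparable scale
    s = (s - s.mean(axis=1, keepdims=True)) / s.std(axis=1, keepdims=True)
    return weights @ s        # (T,) fused scores; threshold to accept/reject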
<img src="https://production-media.paperswithcode.com/thumbnails/task/task-0000000247-88badf9f_aE9eDck.gif"> <span>Speech Synthesis</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/the-sysu-system-for-the-interspeech-2015#tasks"> <span class="badge badge-primary"> <b>+1</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary" style="border:none;background-color:transparent"> </span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/the-sysu-system-for-the-interspeech-2015" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/the-sysu-system-for-the-interspeech-2015#code" class="badge badge-dark badge-nocode "> <span class=" icon-wrapper icon-ion" data-name="add"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="32" d="M256 112v288m144-144H112"/></svg></span> Add Code </a> <br/> </div> </div> </div> </div> </div> <div class="row infinite-item item paper-card"> <!-- None --> <div class="col-lg-3 item-image-col"> <a href="/paper/small-instance-detection-by-integer"> <div class="item-image" style="background-image: url('https://production-media.paperswithcode.com/thumbnails/paper/89886.jpg');"> </div> </a> </div> <div class="col-lg-9 item-col"> <div class="row"> <div class="col-lg-9 item-content"> <h1><a href="/paper/small-instance-detection-by-integer">Small Instance Detection by Integer Programming on Object Density Maps</a></h1> <p class="author-section" style="padding-top:2px"> <a href="/paper/small-instance-detection-by-integer#code">no code implementations</a> • <span class="item-conference-link"> <a href="/conference/cvpr-2015-6"> CVPR 2015 </a> </span> • <span class="author-span "> <a href="/author/zheng-ma">Zheng Ma</a></span>, <span class="author-span author-matched"> <a href="/author/lei-yu">Lei Yu</a></span>, <span class="author-span "> <a href="/author/antoni-b-chan">Antoni B. 
Deep Learning for Answer Sentence Selection
2 code implementations (★ 716) • 4 Dec 2014 • Lei Yu, Karl Moritz Hermann, Phil Blunsom, Stephen Pulman
Answer sentence selection is the task of identifying sentences that contain the answer to a given question.
Ranked #3 on Question Answering on QASent
Tasks: Deep Learning, Feature Engineering (+2 more)
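This line of work scores a question against each candidate sentence by matching their distributed representations. As a hedged sketch of that idea, the bilinear matcher below takes pre-computed sentence vectors and returns a correctness probability; how those vectors are composed (the paper considers bag-of-words and convolutional compositions) and any additional features are outside this illustration.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def match_prob(q_vec, a_vec, M, b):
    # q_vec: (d,) question vector; a_vec: (d,) candidate-sentence vector
    # M: (d, d) learned bilinear interaction matrix; b: scalar bias
    return sigmoid(q_vec @ M @ a_vec + b)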
href="/task/deep-learning"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Deep Learning</span> </span> </a> <a href="/task/feature-engineering"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Feature Engineering</span> </span> </a> <a style="position: relative; top: -2px;" href="/paper/deep-learning-for-answer-sentence-selection#tasks"> <span class="badge badge-primary"> <b>+2</b> </span> </a> </p> </div> <div class="col-lg-3 item-interact text-center"> <div class="entity-stars"> <span class="badge badge-secondary"><span class=" icon-wrapper icon-ion" data-name="star"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M394 480a16 16 0 0 1-9.39-3L256 383.76 127.39 477a16 16 0 0 1-24.55-18.08L153 310.35 23 221.2a16 16 0 0 1 9-29.2h160.38l48.4-148.95a16 16 0 0 1 30.44 0l48.4 149H480a16 16 0 0 1 9.05 29.2L359 310.35l50.13 148.53A16 16 0 0 1 394 480z"/></svg></span> 716</span> </div> <div class="entity" style="margin-bottom: 20px;"> <a href="/paper/deep-learning-for-answer-sentence-selection" class="badge badge-light "> <span class=" icon-wrapper icon-ion" data-name="document"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M428 224H288a48 48 0 0 1-48-48V36a4 4 0 0 0-4-4h-92a64 64 0 0 0-64 64v320a64 64 0 0 0 64 64h224a64 64 0 0 0 64-64V228a4 4 0 0 0-4-4z"/><path d="M419.22 188.59L275.41 44.78a2 2 0 0 0-3.41 1.41V176a16 16 0 0 0 16 16h129.81a2 2 0 0 0 1.41-3.41z"/></svg></span> Paper </a> <br/> <a href="/paper/deep-learning-for-answer-sentence-selection#code" class="badge badge-dark "> <span class=" icon-wrapper icon-ion" data-name="logo-github"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M256 32C132.3 32 32 134.9 32 261.7c0 101.5 64.2 187.5 153.2 217.9a17.56 17.56 0 0 0 3.8.4c8.3 0 11.5-6.1 11.5-11.4 0-5.5-.2-19.9-.3-39.1a102.4 102.4 0 0 1-22.6 2.7c-43.1 0-52.9-33.5-52.9-33.5-10.2-26.5-24.9-33.6-24.9-33.6-19.5-13.7-.1-14.1 1.4-14.1h.1c22.5 2 34.3 23.8 34.3 23.8 11.2 19.6 26.2 25.1 39.6 25.1a63 63 0 0 0 25.6-6c2-14.8 7.8-24.9 14.2-30.7-49.7-5.8-102-25.5-102-113.5 0-25.1 8.7-45.6 23-61.6-2.3-5.8-10-29.2 2.2-60.8a18.64 18.64 0 0 1 5-.5c8.1 0 26.4 3.1 56.6 24.1a208.21 208.21 0 0 1 112.2 0c30.2-21 48.5-24.1 56.6-24.1a18.64 18.64 0 0 1 5 .5c12.2 31.6 4.5 55 2.2 60.8 14.3 16.1 23 36.6 23 61.6 0 88.2-52.4 107.6-102.3 113.3 8 7.1 15.2 21.1 15.2 42.5 0 30.7-.3 55.5-.3 63 0 5.4 3.1 11.5 11.4 11.5a19.35 19.35 0 0 0 4-.4C415.9 449.2 480 363.1 480 261.7 480 134.9 379.7 32 256 32z"/></svg></span> Code </a> <br/> </div> </div> </div> </div> </div> </div> <div class="loading" style="display: none;"> <div class="loader-ellips infinite-scroll-request"> <span class="loader-ellips__dot"></span> <span class="loader-ellips__dot"></span> <span class="loader-ellips__dot"></span> <span class="loader-ellips__dot"></span> </div> </div> <div class="search-submit-paper text-center" style="font-size:16px;padding-bottom:30px;"> Cannot find the paper you are looking for? You can <a href="/submit-paper">Submit</a> a new open access paper. 
</div> </div> </div> <div class="footer"> <div class="footer-contact"> <span class="footer-contact-item">Contact us on:</span> <a class="footer-contact-item" href="mailto:hello@paperswithcode.com"> <span class=" icon-wrapper icon-ion" data-name="mail"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M424 80H88a56.06 56.06 0 0 0-56 56v240a56.06 56.06 0 0 0 56 56h336a56.06 56.06 0 0 0 56-56V136a56.06 56.06 0 0 0-56-56zm-14.18 92.63l-144 112a16 16 0 0 1-19.64 0l-144-112a16 16 0 1 1 19.64-25.26L256 251.73l134.18-104.36a16 16 0 0 1 19.64 25.26z"/></svg></span> hello@paperswithcode.com </a>. <span class="footer-contact-item"> Papers With Code is a free resource with all data licensed under <a rel="noreferrer" href="https://creativecommons.org/licenses/by-sa/4.0/">CC-BY-SA</a>. </span> </div> <div class="footer-links"> <a href="/site/terms">Terms</a> <a href="/site/data-policy">Data policy</a> <a href="/site/cookies-policy">Cookies policy</a> <a href="/about#team" class="fair-logo"> from <img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAANAAAAAgCAMAAABU6AZfAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAJcEhZcwAAFiUAABYlAUlSJPAAAABFUExURUdwTBwqMhwqMxsqMhkqMxsqMhwqMgCA+hwrMxJIgBsrMxsqMgJ28AF58wF38BsqMwB58hsqMwF17wF07hwrMwRm4QJz7Wj6SIIAAAAUdFJOUwDP87wcPIT+4A1tVti1Ta0smZVzG3JP8wAABR9JREFUWMO1memWpCoMgF0QxX1//0e9kCAkAadq5tzKjzndQmM+szNFEWQ9puu6xn02BXm4j23bTsdapKJAMguFgRVT/Ejyx4uH5hgvL1PUfm69jEd6bN05GTJvXF5X/hfRcPyWe2kTLDFdRA4ENVMbZZJGMt3ppEttNMDC2X/Qa7MK1OrveZoKz2/445I+U4znuvaExxKZLFCqtym/A6rzn+OjbHj8ubwDmfESslvtgWea13WeckQPUKJTf/4USHkDnVXzCrT74DnmeX+8rjgcxA4QBmPpyAKdOm+5XwFpgHH/bG9AMzLMqM9DxxCQaM0qLr7U4xE/AgIDVRBHlcoDeYd7lFee6GZOBvaaskD8S6nut0Dg0ItZEt+IQAfjseIzRDvS/WCxWQJ17phqEGqepQBS/VaXZa0H/4XUYMVt6nr309DEjYvduPT2gWELQTr0iQbC1+SADOg/kjVvspGqX6zSRAgEKbqOf6zgd82AVB+8s0YNm5NL6Y8MGzttwKt0krP9+9A/+hzQTALoUX5MnxW7iCIEUmD7IVZb8G0G1HRE9UqbWKkEUFPSR0MWqH5eB65XmgzQdN3WGjxReROxPD2LROeBIEiD7UGLraBAjMcS9W9AquTPckBgoMqEWG1SIGN57otn5KO9Y30N4rq6MQFC5TX1cEWBfJLY+mbQ5ZMUm8UK7F1A9GNc90T3enkpCZhCdUzfdQq0Wp774gnZao55YU3SgkmAVBez1eDfR4BABd/XqY36ichyaLUnyJZ8jatimUBjqQTouK2M3OGs4miiiduN5bkHCL15C9Zw7heBRMHYSMRxIGyYFsPqpwTqactT8w0P0OSA9iRY9jQvrDyIAhCoAjrrR90I1PNCpcivHEh+cATUmS5xoCaNB3ggMzqgRO/RYPIb1WviDkB4sv22kB8ghQcgUIFWzyUmaQ6kpf5DCoTFh5fwQQCt493e9ypD5Xjq7S5cMQeEubpBf2oKCoSMohPzduBAi2yimhRIc3NvrOd+gCxPexvhcGPM3SRoJpbmIhAGSudTNgNCR+qIRL05UCebsxTIiAYOX6sEkONphRkw9A9ZjADIZIDg857we5MBSiQHVMlWJgXyeTBIyVpGD4RttHC4yVtENHn7K5ASdeM3QGX2sKcKBCBmITYmrGii9TOQT7JYwxOgrhbyby4XJrvs54kuR8vlCg4XEgEOEs8Q8R5DYZboCwEESpTmi/Hhc1Lo8zxPlghZjpbLqWVGUGxSes1y4W2lkkC+Wf0C6GPaxtZo0VQW4nOhsJLqAg01HXqgGN0+083MegKoYLdisbDqzHVG1iZJYe0EUDoB+dj149gDRCCgt2lZ1zA5nhvCyEwvrc/b3N/HiZlMgINmZaR/aX3MJluf7Kepo8+F5tRfUh1wR0odzg8Srnm9w7L5SyB/p6H9Ptt0Vj310ngAlDHbnLo3mGc00sJiQ+4KEM+I8xC7fWv5VGcz3Y0C2ZCa70sgf0tXbnbY1jXpln3W6jYXDG4jNthdrfVWn8n4gAVAZe+0GgaEaeGFx4XRQyTM9yWQnNuIAy5/HPAWPuDJ8Yc66sYvSeY/8dhlYqH0kuQzkFQ03nnHCyI/gtc0GfM7BVPmL5J0yHPkXm6d3u6v/TLw3GL5ayDr6WW47awHYmS1VC+XJOVQcCCZBPk13SCvgmcb8uI/UqjqdvlOlk3j5OU20C0putdO1ZWNo0a8oumXslx0vMYaNrfPURt2hnp5G2rhtsEP5j/3Wqt0fQd1YgAAAABJRU5ErkJggg=="> </a> </div> </div> <script> run_after_frontend_loaded.push(() => { var infinite = new Waypoint.Infinite({ element: $('.infinite-container')[0], onBeforePageLoad: function () { $('.loading').show(); }, onAfterPageLoad: function ($items) { $('.loading').hide(); } }); }); function toggleTweets(paper_id){ var element_id = "#tweets-" + paper_id; $( element_id ).toggle("slow"); } function fullHeight(paper_id){ var element_id = 
"#tweets-" + paper_id; // hide read more element var read_more = $("#see-more-"+paper_id); read_more.fadeOut(); $(element_id).get(0).style.setProperty("max-height", "9999px") } </script> <script> run_after_frontend_loaded.push(() => { $(function() { $.fn.modal.Constructor.prototype._enforceFocus = function() {}; $.widget( "custom.catcomplete", $.ui.autocomplete, { _create: function() { this._super(); this.widget().menu( "option", "items", "> :not(.ui-autocomplete-category)" ); }, /** Overrides the _renderItem method in jquery to allow for search result images and icons **/ _renderItem: function( ul, item ) { /** If we have an image in the seearch item then render it; if no task image available, use default **/ if ( "image" in item ) { if ( item.image ) { var image_url = item.image; } else { var image_url = "https://production-media.paperswithcode.com/" + "tasks/default.gif"; } return $( "<li>" ) .append( $( "<div>" ).text( item.label ).prepend( $( "<img src=" + image_url + ">") ) ) .appendTo( ul ); } else { return $( "<li>" ) .append($("<div>").text(item.label)) .appendTo( ul ); } }, _renderMenu: function( ul, items ) { var that = this, currentCategory = ""; $.each( items, function( index, item ) { var li; if ( item.category != currentCategory ) { ul.append( "<li class='ui-autocomplete-category'>" + item.category + "</li>" ); currentCategory = item.category; } li = that._renderItemData( ul, item ); if (item.meta !== null) { li.attr('data-qmeta', item.meta); } if ( item.category ) { li.attr( "aria-label", item.category + " : " + item.label ); } }); } }); $( "#id_global_search_input" ).catcomplete({ minLength: 2, source: function( request, response ) { var term = request.term; $.get( "/api/search-autocomplete/", {"q": term}, function(data){ let t = data.tasks, lb = data.leaderboards, p = data.papers, d = data.datasets, m = data.methods; let ts = [], lbs = [], ps = [], ds = [], ms = []; let total = 0; let maxItems = 12; for (let i=0; i<5 && total < maxItems; i++) { if (t.length && total < maxItems) { ts.push({ label: t[0].name, image: t[0].image, category: "Tasks", meta: null, }); t.shift(); total ++; } if (lb.length && total < maxItems) { lbs.push({ label: lb[0].name, image: lb[0].image, category: "Leaderboards", meta: lb[0].slug }); lb.shift(); total ++; } if (p.length && total < maxItems) { ps.push({label: p[0].title, category: "Papers", meta: null}); p.shift(); total ++; } if (d.length && total < maxItems) { ds.push({ label: d[0].name, image: d[0].image, category: "Datasets", meta: d[0].slug, }); d.shift(); total ++; } if (m.length && total < maxItems) { ms.push({ label: m[0].name, image: m[0].image, category: "Methods", meta: null }); m.shift(); total ++; } } let searchData = ts.concat(lbs, ps, ds, ms); response(searchData); }); }, select: function(event, ui) { $("#id_global_search_input").val(ui.item.label); if (typeof gtag !== 'undefined') { gtag('event', 'SiteActions', { 'event_category': 'Search', 'event_label': ui.item.category, }); } if (ui.item.meta === null) { $('#q_meta').val(''); $('#q_meta').removeAttr('name'); } else { if(!$('#q_meta').attr("name")) { $('#q_meta').attr('name', 'q_meta'); } $('#q_meta').val(ui.item.meta); } $('#q_type').val(ui.item.category.toLowerCase()); $("#id_global_search_form").submit(); } }); if ($(window).width() < 1200 && $(window).width() > 992 ) { $("#id_global_search_input").attr("placeholder", "Search"); } // Setup csrf token for ajax requests let getCookie = (name) => { var cookieValue = null; if (document.cookie && document.cookie !== '') { var 
cookies = document.cookie.split(';'); for (var i = 0; i < cookies.length; i++) { var cookie = jQuery.trim(cookies[i]); // Does this cookie string begin with the name we want? if (cookie.substring(0, name.length + 1) === (name + '=')) { cookieValue = decodeURIComponent(cookie.substring(name.length + 1)); break; } } } return cookieValue; }; let csrftoken = getCookie('csrftoken'); // Make sure we use the most up-to-date CSRF token $("input[name='csrfmiddlewaretoken']").val(csrftoken); function csrfSafeMethod(method) { // these HTTP methods do not require CSRF protection return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method)); } $.ajaxSetup({ beforeSend: function(xhr, settings) { if (!csrfSafeMethod(settings.type) && !this.crossDomain) { xhr.setRequestHeader("X-CSRFToken", csrftoken); } } }); }); }); </script> <script>!function(e){function t(t){for(var n,a,s=t[0],u=t[1],f=t[2],i=0,d=[];i<s.length;i++)a=s[i],Object.prototype.hasOwnProperty.call(o,a)&&o[a]&&d.push(o[a][0]),o[a]=0;for(n in u)Object.prototype.hasOwnProperty.call(u,n)&&(e[n]=u[n]);for(l&&l(t);d.length;)d.shift()();return c.push.apply(c,f||[]),r()}function r(){for(var e,t=0;t<c.length;t++){for(var r=c[t],n=!0,a=1;a<r.length;a++){var u=r[a];0!==o[u]&&(n=!1)}n&&(c.splice(t--,1),e=s(s.s=r[0]))}return e}var n={},a={11:0},o={11:0},c=[];function s(t){if(n[t])return n[t].exports;var r=n[t]={i:t,l:!1,exports:{}};return e[t].call(r.exports,r,r.exports,s),r.l=!0,r.exports}s.e=function(e){var t=[];a[e]?t.push(a[e]):0!==a[e]&&{2:1,3:1,5:1,6:1,8:1,9:1,10:1}[e]&&t.push(a[e]=new Promise((function(t,r){for(var n="static/css/"+({4:"chart",5:"conference-page",6:"example-page",8:"newsletters-create-page",9:"newsletters-edit-page",10:"newsletters-list-page",12:"table"}[e]||e)+"."+{0:"31d6cfe0",1:"31d6cfe0",2:"5745a9fd",3:"05600cd7",4:"31d6cfe0",5:"67565070",6:"8444f163",8:"f8a273b3",9:"f8a273b3",10:"db3e0a85",12:"31d6cfe0",14:"31d6cfe0",15:"31d6cfe0"}[e]+".chunk.css",o=s.p+n,c=document.getElementsByTagName("link"),u=0;u<c.length;u++){var f=(l=c[u]).getAttribute("data-href")||l.getAttribute("href");if("stylesheet"===l.rel&&(f===n||f===o))return t()}var i=document.getElementsByTagName("style");for(u=0;u<i.length;u++){var l;if((f=(l=i[u]).getAttribute("data-href"))===n||f===o)return t()}var d=document.createElement("link");d.rel="stylesheet",d.type="text/css",d.onload=t,d.onerror=function(t){var n=t&&t.target&&t.target.src||o,c=new Error("Loading CSS chunk "+e+" failed.\n("+n+")");c.code="CSS_CHUNK_LOAD_FAILED",c.request=n,delete a[e],d.parentNode.removeChild(d),r(c)},d.href=o,document.getElementsByTagName("head")[0].appendChild(d)})).then((function(){a[e]=0})));var r=o[e];if(0!==r)if(r)t.push(r[2]);else{var n=new Promise((function(t,n){r=o[e]=[t,n]}));t.push(r[2]=n);var c,u=document.createElement("script");u.charset="utf-8",u.timeout=120,s.nc&&u.setAttribute("nonce",s.nc),u.src=function(e){return s.p+"static/js/"+({4:"chart",5:"conference-page",6:"example-page",8:"newsletters-create-page",9:"newsletters-edit-page",10:"newsletters-list-page",12:"table"}[e]||e)+"."+{0:"041a0327",1:"eb8f85bf",2:"57df0e43",3:"dd682e9c",4:"934a42ca",5:"ddc33be8",6:"f5234ef0",8:"c76f72bd",9:"aa24afbf",10:"a749f71a",12:"c5756280",14:"be7b1031",15:"b8393014"}[e]+".chunk.js"}(e);var f=new Error;c=function(t){u.onerror=u.onload=null,clearTimeout(i);var r=o[e];if(0!==r){if(r){var n=t&&("load"===t.type?"missing":t.type),a=t&&t.target&&t.target.src;f.message="Loading chunk "+e+" failed.\n("+n+": "+a+")",f.name="ChunkLoadError",f.type=n,f.request=a,r[1](f)}o[e]=void 0}};var 
i=setTimeout((function(){c({type:"timeout",target:u})}),12e4);u.onerror=u.onload=c,document.head.appendChild(u)}return Promise.all(t)},s.m=e,s.c=n,s.d=function(e,t,r){s.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},s.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},s.t=function(e,t){if(1&t&&(e=s(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(s.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var n in e)s.d(r,n,function(t){return e[t]}.bind(null,n));return r},s.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return s.d(t,"a",t),t},s.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},s.p="https://production-assets.paperswithcode.com/",s.oe=function(e){throw console.error(e),e};var u=this.webpackJsonpfrontend=this.webpackJsonpfrontend||[],f=u.push.bind(u);u.push=t,u=u.slice();for(var i=0;i<u.length;i++)t(u[i]);var l=f;r()}([])</script><script src="https://production-assets.paperswithcode.com/static/js/13.aa3fa037.chunk.js"></script><script src="https://production-assets.paperswithcode.com/static/js/main.99ee382b.chunk.js"></script> </body> </html>