Alexey Bochkovskiy - Academia.edu
Research Engineer at Intel ISL
Address: Russian Federation
Profile: https://independent.academia.edu/alexeyab84

Followers: 6
Following: 6
Research papers: 4

Research interests (10): Rate Control, Artificial Intelligence, Data Mining, Image Processing, Machine Learning, Computer Vision, Virtual Reality (Computer Graphics), Optimization techniques, Compute Unified Device Architecture NVIDIA CUDA, GPGPU (General Purpose GPU) Programming
class="user-content-wrapper"><div class="uploads-container" id="social-redesign-work-container"><div class="upload-header"><h2 class="ds2-5-heading-sans-serif-xs">Uploads</h2></div><div class="documents-container backbone-social-profile-documents" style="width: 100%;"><div class="u-taCenter"></div><div class="profile--tab_content_container js-tab-pane tab-pane active" id="all"><div class="profile--tab_heading_container js-section-heading" data-section="Papers" id="Papers"><h3 class="profile--tab_heading_container">Papers by Alexey Bochkovskiy</h3></div><div class="js-work-strip profile--work_container" data-work-id="72590818"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/72590818/Non_deep_Networks"><img alt="Research paper thumbnail of Non-deep Networks" class="work-thumbnail" src="https://attachments.academia-assets.com/81459795/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/72590818/Non_deep_Networks">Non-deep Networks</a></div><div class="wp-workCard_item"><span>ArXiv</span><span>, 2021</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Depth is the hallmark of deep neural networks. But more depth means more sequential computation a...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Depth is the hallmark of deep neural networks. But more depth means more sequential computation and higher latency. This begs the question – is it possible to build high-performing “non-deep” neural networks? We show that it is. To do so, we use parallel subnetworks instead of stacking one layer after another. This helps effectively reduce depth while maintaining high performance. By utilizing parallel substructures, we show, for the first time, that a network with a depth of just 12 can achieve top-1 accuracy over 80% on ImageNet, 96% on CIFAR10, and 81% on CIFAR100. We also show that a network with a low-depth (12) backbone can achieve an AP of 48% on MS-COCO. We analyze the scaling rules for our design and show how to increase performance without changing the network’s depth. Finally, we provide a proof of concept for how non-deep networks could be used to build low-latency recognition systems. 
Code is available at <a href="https://github.com/imankgoyal/NonDeepNetworks" rel="nofollow">https://github.com/imankgoyal/NonDeepNetworks</a>.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="9030f3fb60fe04ce1a475c92080ecd0c" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":81459795,"asset_id":72590818,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/81459795/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="72590818"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="72590818"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 72590818; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=72590818]").text(description); $(".js-view-count[data-work-id=72590818]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 72590818; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='72590818']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 72590818, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "9030f3fb60fe04ce1a475c92080ecd0c" } } $('.js-work-strip[data-work-id=72590818]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":72590818,"title":"Non-deep Networks","translated_title":"","metadata":{"abstract":"Depth is the hallmark of deep neural networks. But more depth means more sequential computation and higher latency. This begs the question – is it possible to build high-performing “non-deep” neural networks? We show that it is. To do so, we use parallel subnetworks instead of stacking one layer after another. This helps effectively reduce depth while maintaining high performance. 
By utilizing parallel substructures, we show, for the first time, that a network with a depth of just 12 can achieve top-1 accuracy over 80% on ImageNet, 96% on CIFAR10, and 81% on CIFAR100. We also show that a network with a low-depth (12) backbone can achieve an AP of 48% on MS-COCO. We analyze the scaling rules for our design and show how to increase performance without changing the network’s depth. Finally, we provide a proof of concept for how non-deep networks could be used to build low-latency recognition systems. Code is available at https://github.com/imankgoyal/NonDeepNetworks.","publisher":"ArXiv","publication_date":{"day":null,"month":null,"year":2021,"errors":{}},"publication_name":"ArXiv"},"translated_abstract":"Depth is the hallmark of deep neural networks. But more depth means more sequential computation and higher latency. This begs the question – is it possible to build high-performing “non-deep” neural networks? We show that it is. To do so, we use parallel subnetworks instead of stacking one layer after another. This helps effectively reduce depth while maintaining high performance. By utilizing parallel substructures, we show, for the first time, that a network with a depth of just 12 can achieve top-1 accuracy over 80% on ImageNet, 96% on CIFAR10, and 81% on CIFAR100. We also show that a network with a low-depth (12) backbone can achieve an AP of 48% on MS-COCO. We analyze the scaling rules for our design and show how to increase performance without changing the network’s depth. Finally, we provide a proof of concept for how non-deep networks could be used to build low-latency recognition systems. Code is available at https://github.com/imankgoyal/NonDeepNetworks.","internal_url":"https://www.academia.edu/72590818/Non_deep_Networks","translated_internal_url":"","created_at":"2022-02-28T04:48:59.133-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":8971336,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":81459795,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459795/thumbnails/1.jpg","file_name":"2110.07641v1.pdf","download_url":"https://www.academia.edu/attachments/81459795/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Non_deep_Networks.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459795/2110.07641v1-libre.pdf?1646053092=\u0026response-content-disposition=attachment%3B+filename%3DNon_deep_Networks.pdf\u0026Expires=1732465985\u0026Signature=e2i1g~It4qQwS6HaJiVskZEhvK94r3RANaK5yXc4fF8LUtpXpk1ZOO42u9f1g~aw2WHEZHiCLXux4pffpz7wluCKvRimnbWu~KMr~dM3NpuHaLe5AKy0teQmwp1GS-4XQ~kG-vz-gaLDkt5HTTjtSKFU2WPoBM-sSEwvAQl8wZCQXzg0CrgZSQgQwedScqz8JHGk6horIW2CvHcUD~mT1pHLEnrizJscZEgfp8JcPj7L~RU~9r3jCRxceMa0O-8NRQ5-yS2Goe1UbvHt7eT4P0qXD7hcnkt0vEXVuB-Z5laN4~rdFSeARWrSwvG88RZ-r8X5i0-pFAvMxjfcZG4MaQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Non_deep_Networks","translated_slug":"","page_count":12,"language":"en","content_type":"Work","owner":{"id":8971336,"first_name":"Alexey","middle_initials":"","last_name":"Bochkovskiy","page_name":"alexeyab84","domain_name":"independent","created_at":"2014-02-08T19:34:11.371-08:00","display_name":"Alexey 
Bochkovskiy","url":"https://independent.academia.edu/alexeyab84"},"attachments":[{"id":81459795,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459795/thumbnails/1.jpg","file_name":"2110.07641v1.pdf","download_url":"https://www.academia.edu/attachments/81459795/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Non_deep_Networks.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459795/2110.07641v1-libre.pdf?1646053092=\u0026response-content-disposition=attachment%3B+filename%3DNon_deep_Networks.pdf\u0026Expires=1732465985\u0026Signature=e2i1g~It4qQwS6HaJiVskZEhvK94r3RANaK5yXc4fF8LUtpXpk1ZOO42u9f1g~aw2WHEZHiCLXux4pffpz7wluCKvRimnbWu~KMr~dM3NpuHaLe5AKy0teQmwp1GS-4XQ~kG-vz-gaLDkt5HTTjtSKFU2WPoBM-sSEwvAQl8wZCQXzg0CrgZSQgQwedScqz8JHGk6horIW2CvHcUD~mT1pHLEnrizJscZEgfp8JcPj7L~RU~9r3jCRxceMa0O-8NRQ5-yS2Goe1UbvHt7eT4P0qXD7hcnkt0vEXVuB-Z5laN4~rdFSeARWrSwvG88RZ-r8X5i0-pFAvMxjfcZG4MaQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":81459793,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459793/thumbnails/1.jpg","file_name":"2110.07641v1.pdf","download_url":"https://www.academia.edu/attachments/81459793/download_file","bulk_download_file_name":"Non_deep_Networks.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459793/2110.07641v1-libre.pdf?1646053094=\u0026response-content-disposition=attachment%3B+filename%3DNon_deep_Networks.pdf\u0026Expires=1732465985\u0026Signature=fkh-m4OHUL4JdV6VnwVdRL6dbQqHv~zD8LkmTv7VgGNnryAp1eL9aW1NVt9PDd13mtJAls-LElOhkuPbaU8h-ZXWZppQxEgWfWEO1jwDRlGd8AlscWNMkmmDTqsN0TVddn2UBaQ0P11fwrbS5alICApBymWlAJ1rkXrFAtuDzHpvKktT5C53pJIcJAVPxYLO2o1lniUK6k1O42AW23Z8z4LVQLf1V3plIuEBDdJqVKCfP1aG2NGE02f636KvI0ED-v3wzsLmWl9tjrzRQLaxCRelQSnLI9dGFwzeu3n7CC2OnbkhTU5DSoLMMKD2jmoBN6~ZxTcxzRb0WnZTrt7vcQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":3193313,"name":"arXiv","url":"https://www.academia.edu/Documents/in/arXiv"}],"urls":[{"id":18076270,"url":"https://arxiv.org/pdf/2110.07641v1.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="72590817"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/72590817/Vision_Transformers_for_Dense_Prediction"><img alt="Research paper thumbnail of Vision Transformers for Dense Prediction" class="work-thumbnail" src="https://attachments.academia-assets.com/81459792/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/72590817/Vision_Transformers_for_Dense_Prediction">Vision Transformers for Dense Prediction</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We introduce dense prediction transformers, an architecture that leverages vision transformers in...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa 
fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We introduce dense prediction transformers, an architecture that leverages vision transformers in place of convolutional networks as a backbone for dense prediction tasks. We assemble tokens from various stages of the vision transformer into image-like representations at various resolutions and progressively combine them into fullresolution predictions using a convolutional decoder. The transformer backbone processes representations at a constant and relatively high resolution and has a global receptive field at every stage. These properties allow the dense prediction transformer to provide finer-grained and more globally coherent predictions when compared to fullyconvolutional networks. Our experiments show that this architecture yields substantial improvements on dense prediction tasks, especially when a large amount of training data is available. For monocular depth estimation, we observe an improvement of up to 28% in relative performance when compared to a state-of-the-art full...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="bc795400c77313bdff1c626924dfc2e8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":81459792,"asset_id":72590817,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/81459792/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="72590817"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="72590817"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 72590817; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=72590817]").text(description); $(".js-view-count[data-work-id=72590817]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 72590817; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='72590817']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 72590817, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "bc795400c77313bdff1c626924dfc2e8" } } $('.js-work-strip[data-work-id=72590817]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":72590817,"title":"Vision Transformers for Dense Prediction","translated_title":"","metadata":{"abstract":"We introduce dense prediction transformers, an architecture that leverages vision transformers in place of convolutional networks as a backbone for dense prediction tasks. We assemble tokens from various stages of the vision transformer into image-like representations at various resolutions and progressively combine them into fullresolution predictions using a convolutional decoder. The transformer backbone processes representations at a constant and relatively high resolution and has a global receptive field at every stage. These properties allow the dense prediction transformer to provide finer-grained and more globally coherent predictions when compared to fullyconvolutional networks. Our experiments show that this architecture yields substantial improvements on dense prediction tasks, especially when a large amount of training data is available. For monocular depth estimation, we observe an improvement of up to 28% in relative performance when compared to a state-of-the-art full...","publisher":"ArXiv","publication_date":{"day":null,"month":null,"year":2021,"errors":{}}},"translated_abstract":"We introduce dense prediction transformers, an architecture that leverages vision transformers in place of convolutional networks as a backbone for dense prediction tasks. We assemble tokens from various stages of the vision transformer into image-like representations at various resolutions and progressively combine them into fullresolution predictions using a convolutional decoder. The transformer backbone processes representations at a constant and relatively high resolution and has a global receptive field at every stage. These properties allow the dense prediction transformer to provide finer-grained and more globally coherent predictions when compared to fullyconvolutional networks. Our experiments show that this architecture yields substantial improvements on dense prediction tasks, especially when a large amount of training data is available. 
For monocular depth estimation, we observe an improvement of up to 28% in relative performance when compared to a state-of-the-art full...","internal_url":"https://www.academia.edu/72590817/Vision_Transformers_for_Dense_Prediction","translated_internal_url":"","created_at":"2022-02-28T04:48:58.976-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":8971336,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":81459792,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459792/thumbnails/1.jpg","file_name":"DPT.pdf","download_url":"https://www.academia.edu/attachments/81459792/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Vision_Transformers_for_Dense_Prediction.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459792/DPT-libre.pdf?1646053096=\u0026response-content-disposition=attachment%3B+filename%3DVision_Transformers_for_Dense_Prediction.pdf\u0026Expires=1732465985\u0026Signature=fVHHrXobwx3Q2HX~yg8lh-quu5oS7xvGKY9Pqg7zT9HS9pz5GJl9PefPyGvFzMV0BQaCVPlsTCquG7~LrUEhMdGYnsBAFBhajvh73QCgCdwvaEbQ1iZ7C2N3usyf9MyUNSWkUDZv5A-4S0wWcMXW46mw3V~IFNlEPh8o5ZMMP0Lhe9cXkZ9hkF06-3Kvmo0b72Mc-oCJ5l-pQrXk7N4QaoP7uLn2ob-1gU-scJ6TWXYID~fq7OGUWN8RcpH8c72Tw8TfqqmwxVW~9dmkhkvqQix1-xXbebTBIPS4lFvo-Gp3DKdSa7pxydhsSPinPfCVXuNpdRd7tOzOV2I4mzS6iw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Vision_Transformers_for_Dense_Prediction","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":8971336,"first_name":"Alexey","middle_initials":"","last_name":"Bochkovskiy","page_name":"alexeyab84","domain_name":"independent","created_at":"2014-02-08T19:34:11.371-08:00","display_name":"Alexey 
Bochkovskiy","url":"https://independent.academia.edu/alexeyab84"},"attachments":[{"id":81459792,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459792/thumbnails/1.jpg","file_name":"DPT.pdf","download_url":"https://www.academia.edu/attachments/81459792/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Vision_Transformers_for_Dense_Prediction.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459792/DPT-libre.pdf?1646053096=\u0026response-content-disposition=attachment%3B+filename%3DVision_Transformers_for_Dense_Prediction.pdf\u0026Expires=1732465985\u0026Signature=fVHHrXobwx3Q2HX~yg8lh-quu5oS7xvGKY9Pqg7zT9HS9pz5GJl9PefPyGvFzMV0BQaCVPlsTCquG7~LrUEhMdGYnsBAFBhajvh73QCgCdwvaEbQ1iZ7C2N3usyf9MyUNSWkUDZv5A-4S0wWcMXW46mw3V~IFNlEPh8o5ZMMP0Lhe9cXkZ9hkF06-3Kvmo0b72Mc-oCJ5l-pQrXk7N4QaoP7uLn2ob-1gU-scJ6TWXYID~fq7OGUWN8RcpH8c72Tw8TfqqmwxVW~9dmkhkvqQix1-xXbebTBIPS4lFvo-Gp3DKdSa7pxydhsSPinPfCVXuNpdRd7tOzOV2I4mzS6iw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":81459794,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459794/thumbnails/1.jpg","file_name":"DPT.pdf","download_url":"https://www.academia.edu/attachments/81459794/download_file","bulk_download_file_name":"Vision_Transformers_for_Dense_Prediction.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459794/DPT-libre.pdf?1646053094=\u0026response-content-disposition=attachment%3B+filename%3DVision_Transformers_for_Dense_Prediction.pdf\u0026Expires=1732465985\u0026Signature=PtgYd7t8SckN3Bba6k4azcPd~3BZvYSlvA13Vh1jMBsnL801xP5Wz4bF~HQHGVjkxM0Bk-xNezp4I6WBdLNVfgVJ0M8gEw2AMFMykrs097E~vDUJ9MLvFvDNbc4ue9Tm9Fn-w0HLqSUkg8~2u0NUnWCGVk4YS-JP3z7ZYODW4img0FTzYzaFfFU1b1PUUBI-cMK-t6wLNWgRRSvqRXKCb0QNmVrm6qdEOCvPrabW1u2plTIIXdiAhi2QvSyJ7XmbakcVKdca12UBYVTOW7laM4PYUl6KeO1qIYSlXEjk~-tGpF6-CKqqyD5JfRNeAMLMixDZjqad30cGf5nU547kRg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[],"urls":[{"id":18076269,"url":"http://vladlen.info/papers/DPT.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="72590795"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/72590795/YOLOv4_Optimal_Speed_and_Accuracy_of_Object_Detection"><img alt="Research paper thumbnail of YOLOv4: Optimal Speed and Accuracy of Object Detection" class="work-thumbnail" src="https://attachments.academia-assets.com/81459848/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/72590795/YOLOv4_Optimal_Speed_and_Accuracy_of_Object_Detection">YOLOv4: Optimal Speed and Accuracy of Object Detection</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">There are a huge number of features which are said to improve Convolutional Neural Network (CNN) ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">There 
YOLOv4: Optimal Speed and Accuracy of Object Detection
ArXiv, 2020
There are a huge number of features which are said to improve Convolutional Neural Network (CNN) accuracy. Practical testing of combinations of such features on large datasets, and theoretical justification of the result, is required. Some features operate on certain models exclusively and for certain problems exclusively, or only for small-scale datasets; while some features, such as batch-normalization and residual-connections, are applicable to the majority of models, tasks, and datasets. We assume that such universal features include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT) and Mish-activation. We use new features: WRC, CSP, CmBN, SAT, Mish activation, Mosaic data augmentation, CmBN, DropBlock regularization, and CIoU loss, and combine some of them to achieve state-of-the-art results: 43.5% AP (65.7% AP50) for the MS COCO dataset at a real-time speed of ~65 FPS on Tesla V100...
PDF: https://arxiv.org/abs/2004.10934
href="https://www.academia.edu/62764217/Scaled_YOLOv4_Scaling_Cross_Stage_Partial_Network"><img alt="Research paper thumbnail of Scaled-YOLOv4: Scaling Cross Stage Partial Network" class="work-thumbnail" src="https://attachments.academia-assets.com/75430154/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/62764217/Scaled_YOLOv4_Scaling_Cross_Stage_Partial_Network">Scaled-YOLOv4: Scaling Cross Stage Partial Network</a></div><div class="wp-workCard_item"><span>2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a1403a26a0367d75361ee92707e63663" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":75430154,"asset_id":62764217,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/75430154/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="62764217"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="62764217"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 62764217; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=62764217]").text(description); $(".js-view-count[data-work-id=62764217]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 62764217; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='62764217']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 62764217, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "a1403a26a0367d75361ee92707e63663" } } 
data-section-id="13046971" id="papers"><div class="js-work-strip profile--work_container" data-work-id="72590818"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/72590818/Non_deep_Networks"><img alt="Research paper thumbnail of Non-deep Networks" class="work-thumbnail" src="https://attachments.academia-assets.com/81459795/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/72590818/Non_deep_Networks">Non-deep Networks</a></div><div class="wp-workCard_item"><span>ArXiv</span><span>, 2021</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Depth is the hallmark of deep neural networks. But more depth means more sequential computation a...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Depth is the hallmark of deep neural networks. But more depth means more sequential computation and higher latency. This begs the question – is it possible to build high-performing “non-deep” neural networks? We show that it is. To do so, we use parallel subnetworks instead of stacking one layer after another. This helps effectively reduce depth while maintaining high performance. By utilizing parallel substructures, we show, for the first time, that a network with a depth of just 12 can achieve top-1 accuracy over 80% on ImageNet, 96% on CIFAR10, and 81% on CIFAR100. We also show that a network with a low-depth (12) backbone can achieve an AP of 48% on MS-COCO. We analyze the scaling rules for our design and show how to increase performance without changing the network’s depth. Finally, we provide a proof of concept for how non-deep networks could be used to build low-latency recognition systems. 
Code is available at https://github.com/imankgoyal/NonDeepNetworks.
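The abstract's central idea, running several shallow subnetworks in parallel and fusing their outputs instead of stacking layers, can be illustrated with a small PyTorch sketch. This is a toy block written for this page, not the code in the linked repository; the branch count, channel width, and fusion by summation are assumptions.

# Toy illustration (not the released NonDeepNetworks code): replace a stack of
# sequential conv layers with several shallow branches run in parallel and fused,
# so the block's sequential depth stays small. Branch count and width are arbitrary.
import torch
import torch.nn as nn

class ParallelBlock(nn.Module):
    def __init__(self, channels, num_branches=3):
        super().__init__()
        self.branches = nn.ModuleList(
            nn.Sequential(
                nn.Conv2d(channels, channels, kernel_size=3, padding=1, bias=False),
                nn.BatchNorm2d(channels),
                nn.SiLU(),
            )
            for _ in range(num_branches)
        )

    def forward(self, x):
        # Every branch sees the same input; outputs are summed, so the block adds
        # one conv layer of depth rather than num_branches layers.
        return sum(branch(x) for branch in self.branches)

x = torch.randn(1, 32, 56, 56)
print(ParallelBlock(32)(x).shape)  # torch.Size([1, 32, 56, 56])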
Bochkovskiy","url":"https://independent.academia.edu/alexeyab84"},"attachments":[{"id":81459795,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459795/thumbnails/1.jpg","file_name":"2110.07641v1.pdf","download_url":"https://www.academia.edu/attachments/81459795/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Non_deep_Networks.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459795/2110.07641v1-libre.pdf?1646053092=\u0026response-content-disposition=attachment%3B+filename%3DNon_deep_Networks.pdf\u0026Expires=1732465985\u0026Signature=e2i1g~It4qQwS6HaJiVskZEhvK94r3RANaK5yXc4fF8LUtpXpk1ZOO42u9f1g~aw2WHEZHiCLXux4pffpz7wluCKvRimnbWu~KMr~dM3NpuHaLe5AKy0teQmwp1GS-4XQ~kG-vz-gaLDkt5HTTjtSKFU2WPoBM-sSEwvAQl8wZCQXzg0CrgZSQgQwedScqz8JHGk6horIW2CvHcUD~mT1pHLEnrizJscZEgfp8JcPj7L~RU~9r3jCRxceMa0O-8NRQ5-yS2Goe1UbvHt7eT4P0qXD7hcnkt0vEXVuB-Z5laN4~rdFSeARWrSwvG88RZ-r8X5i0-pFAvMxjfcZG4MaQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":81459793,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459793/thumbnails/1.jpg","file_name":"2110.07641v1.pdf","download_url":"https://www.academia.edu/attachments/81459793/download_file","bulk_download_file_name":"Non_deep_Networks.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459793/2110.07641v1-libre.pdf?1646053094=\u0026response-content-disposition=attachment%3B+filename%3DNon_deep_Networks.pdf\u0026Expires=1732465985\u0026Signature=fkh-m4OHUL4JdV6VnwVdRL6dbQqHv~zD8LkmTv7VgGNnryAp1eL9aW1NVt9PDd13mtJAls-LElOhkuPbaU8h-ZXWZppQxEgWfWEO1jwDRlGd8AlscWNMkmmDTqsN0TVddn2UBaQ0P11fwrbS5alICApBymWlAJ1rkXrFAtuDzHpvKktT5C53pJIcJAVPxYLO2o1lniUK6k1O42AW23Z8z4LVQLf1V3plIuEBDdJqVKCfP1aG2NGE02f636KvI0ED-v3wzsLmWl9tjrzRQLaxCRelQSnLI9dGFwzeu3n7CC2OnbkhTU5DSoLMMKD2jmoBN6~ZxTcxzRb0WnZTrt7vcQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":3193313,"name":"arXiv","url":"https://www.academia.edu/Documents/in/arXiv"}],"urls":[{"id":18076270,"url":"https://arxiv.org/pdf/2110.07641v1.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="72590817"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/72590817/Vision_Transformers_for_Dense_Prediction"><img alt="Research paper thumbnail of Vision Transformers for Dense Prediction" class="work-thumbnail" src="https://attachments.academia-assets.com/81459792/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/72590817/Vision_Transformers_for_Dense_Prediction">Vision Transformers for Dense Prediction</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We introduce dense prediction transformers, an architecture that leverages vision transformers in...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more 
</span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We introduce dense prediction transformers, an architecture that leverages vision transformers in place of convolutional networks as a backbone for dense prediction tasks. We assemble tokens from various stages of the vision transformer into image-like representations at various resolutions and progressively combine them into fullresolution predictions using a convolutional decoder. The transformer backbone processes representations at a constant and relatively high resolution and has a global receptive field at every stage. These properties allow the dense prediction transformer to provide finer-grained and more globally coherent predictions when compared to fullyconvolutional networks. Our experiments show that this architecture yields substantial improvements on dense prediction tasks, especially when a large amount of training data is available. For monocular depth estimation, we observe an improvement of up to 28% in relative performance when compared to a state-of-the-art full...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="bc795400c77313bdff1c626924dfc2e8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":81459792,"asset_id":72590817,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/81459792/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="72590817"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="72590817"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 72590817; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=72590817]").text(description); $(".js-view-count[data-work-id=72590817]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 72590817; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='72590817']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 72590817, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "bc795400c77313bdff1c626924dfc2e8" } } $('.js-work-strip[data-work-id=72590817]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":72590817,"title":"Vision Transformers for Dense Prediction","translated_title":"","metadata":{"abstract":"We introduce dense prediction transformers, an architecture that leverages vision transformers in place of convolutional networks as a backbone for dense prediction tasks. We assemble tokens from various stages of the vision transformer into image-like representations at various resolutions and progressively combine them into fullresolution predictions using a convolutional decoder. The transformer backbone processes representations at a constant and relatively high resolution and has a global receptive field at every stage. These properties allow the dense prediction transformer to provide finer-grained and more globally coherent predictions when compared to fullyconvolutional networks. Our experiments show that this architecture yields substantial improvements on dense prediction tasks, especially when a large amount of training data is available. For monocular depth estimation, we observe an improvement of up to 28% in relative performance when compared to a state-of-the-art full...","publisher":"ArXiv","publication_date":{"day":null,"month":null,"year":2021,"errors":{}}},"translated_abstract":"We introduce dense prediction transformers, an architecture that leverages vision transformers in place of convolutional networks as a backbone for dense prediction tasks. We assemble tokens from various stages of the vision transformer into image-like representations at various resolutions and progressively combine them into fullresolution predictions using a convolutional decoder. The transformer backbone processes representations at a constant and relatively high resolution and has a global receptive field at every stage. These properties allow the dense prediction transformer to provide finer-grained and more globally coherent predictions when compared to fullyconvolutional networks. Our experiments show that this architecture yields substantial improvements on dense prediction tasks, especially when a large amount of training data is available. 
For monocular depth estimation, we observe an improvement of up to 28% in relative performance when compared to a state-of-the-art full...","internal_url":"https://www.academia.edu/72590817/Vision_Transformers_for_Dense_Prediction","translated_internal_url":"","created_at":"2022-02-28T04:48:58.976-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":8971336,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":81459792,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459792/thumbnails/1.jpg","file_name":"DPT.pdf","download_url":"https://www.academia.edu/attachments/81459792/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Vision_Transformers_for_Dense_Prediction.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459792/DPT-libre.pdf?1646053096=\u0026response-content-disposition=attachment%3B+filename%3DVision_Transformers_for_Dense_Prediction.pdf\u0026Expires=1732465985\u0026Signature=fVHHrXobwx3Q2HX~yg8lh-quu5oS7xvGKY9Pqg7zT9HS9pz5GJl9PefPyGvFzMV0BQaCVPlsTCquG7~LrUEhMdGYnsBAFBhajvh73QCgCdwvaEbQ1iZ7C2N3usyf9MyUNSWkUDZv5A-4S0wWcMXW46mw3V~IFNlEPh8o5ZMMP0Lhe9cXkZ9hkF06-3Kvmo0b72Mc-oCJ5l-pQrXk7N4QaoP7uLn2ob-1gU-scJ6TWXYID~fq7OGUWN8RcpH8c72Tw8TfqqmwxVW~9dmkhkvqQix1-xXbebTBIPS4lFvo-Gp3DKdSa7pxydhsSPinPfCVXuNpdRd7tOzOV2I4mzS6iw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Vision_Transformers_for_Dense_Prediction","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":8971336,"first_name":"Alexey","middle_initials":"","last_name":"Bochkovskiy","page_name":"alexeyab84","domain_name":"independent","created_at":"2014-02-08T19:34:11.371-08:00","display_name":"Alexey 
Bochkovskiy","url":"https://independent.academia.edu/alexeyab84"},"attachments":[{"id":81459792,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459792/thumbnails/1.jpg","file_name":"DPT.pdf","download_url":"https://www.academia.edu/attachments/81459792/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Vision_Transformers_for_Dense_Prediction.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459792/DPT-libre.pdf?1646053096=\u0026response-content-disposition=attachment%3B+filename%3DVision_Transformers_for_Dense_Prediction.pdf\u0026Expires=1732465985\u0026Signature=fVHHrXobwx3Q2HX~yg8lh-quu5oS7xvGKY9Pqg7zT9HS9pz5GJl9PefPyGvFzMV0BQaCVPlsTCquG7~LrUEhMdGYnsBAFBhajvh73QCgCdwvaEbQ1iZ7C2N3usyf9MyUNSWkUDZv5A-4S0wWcMXW46mw3V~IFNlEPh8o5ZMMP0Lhe9cXkZ9hkF06-3Kvmo0b72Mc-oCJ5l-pQrXk7N4QaoP7uLn2ob-1gU-scJ6TWXYID~fq7OGUWN8RcpH8c72Tw8TfqqmwxVW~9dmkhkvqQix1-xXbebTBIPS4lFvo-Gp3DKdSa7pxydhsSPinPfCVXuNpdRd7tOzOV2I4mzS6iw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":81459794,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459794/thumbnails/1.jpg","file_name":"DPT.pdf","download_url":"https://www.academia.edu/attachments/81459794/download_file","bulk_download_file_name":"Vision_Transformers_for_Dense_Prediction.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459794/DPT-libre.pdf?1646053094=\u0026response-content-disposition=attachment%3B+filename%3DVision_Transformers_for_Dense_Prediction.pdf\u0026Expires=1732465985\u0026Signature=PtgYd7t8SckN3Bba6k4azcPd~3BZvYSlvA13Vh1jMBsnL801xP5Wz4bF~HQHGVjkxM0Bk-xNezp4I6WBdLNVfgVJ0M8gEw2AMFMykrs097E~vDUJ9MLvFvDNbc4ue9Tm9Fn-w0HLqSUkg8~2u0NUnWCGVk4YS-JP3z7ZYODW4img0FTzYzaFfFU1b1PUUBI-cMK-t6wLNWgRRSvqRXKCb0QNmVrm6qdEOCvPrabW1u2plTIIXdiAhi2QvSyJ7XmbakcVKdca12UBYVTOW7laM4PYUl6KeO1qIYSlXEjk~-tGpF6-CKqqyD5JfRNeAMLMixDZjqad30cGf5nU547kRg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[],"urls":[{"id":18076269,"url":"http://vladlen.info/papers/DPT.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="72590795"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/72590795/YOLOv4_Optimal_Speed_and_Accuracy_of_Object_Detection"><img alt="Research paper thumbnail of YOLOv4: Optimal Speed and Accuracy of Object Detection" class="work-thumbnail" src="https://attachments.academia-assets.com/81459848/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/72590795/YOLOv4_Optimal_Speed_and_Accuracy_of_Object_Detection">YOLOv4: Optimal Speed and Accuracy of Object Detection</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">There are a huge number of features which are said to improve Convolutional Neural Network (CNN) ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span 
class="js-work-more-abstract-untruncated hidden">There are a huge number of features which are said to improve Convolutional Neural Network (CNN) accuracy. Practical testing of combinations of such features on large datasets, and theoretical justification of the result, is required. Some features operate on certain models exclusively and for certain problems exclusively, or only for small-scale datasets; while some features, such as batch-normalization and residual-connections, are applicable to the majority of models, tasks, and datasets. We assume that such universal features include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT) and Mish-activation. We use new features: WRC, CSP, CmBN, SAT, Mish activation, Mosaic data augmentation, CmBN, DropBlock regularization, and CIoU loss, and combine some of them to achieve state-of-the-art results: 43.5% AP (65.7% AP50) for the MS COCO dataset at a realtime speed of ~65 FPS on Tesla V100....</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="5d00682b4d8120117c3420c350e0d91d" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":81459848,"asset_id":72590795,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/81459848/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="72590795"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="72590795"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 72590795; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=72590795]").text(description); $(".js-view-count[data-work-id=72590795]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 72590795; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='72590795']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 72590795, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "5d00682b4d8120117c3420c350e0d91d" } } $('.js-work-strip[data-work-id=72590795]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":72590795,"title":"YOLOv4: Optimal Speed and Accuracy of Object Detection","translated_title":"","metadata":{"abstract":"There are a huge number of features which are said to improve Convolutional Neural Network (CNN) accuracy. Practical testing of combinations of such features on large datasets, and theoretical justification of the result, is required. Some features operate on certain models exclusively and for certain problems exclusively, or only for small-scale datasets; while some features, such as batch-normalization and residual-connections, are applicable to the majority of models, tasks, and datasets. We assume that such universal features include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT) and Mish-activation. We use new features: WRC, CSP, CmBN, SAT, Mish activation, Mosaic data augmentation, CmBN, DropBlock regularization, and CIoU loss, and combine some of them to achieve state-of-the-art results: 43.5% AP (65.7% AP50) for the MS COCO dataset at a realtime speed of ~65 FPS on Tesla V100....","publisher":"ArXiv","publication_date":{"day":null,"month":null,"year":2020,"errors":{}}},"translated_abstract":"There are a huge number of features which are said to improve Convolutional Neural Network (CNN) accuracy. Practical testing of combinations of such features on large datasets, and theoretical justification of the result, is required. Some features operate on certain models exclusively and for certain problems exclusively, or only for small-scale datasets; while some features, such as batch-normalization and residual-connections, are applicable to the majority of models, tasks, and datasets. We assume that such universal features include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT) and Mish-activation. 
We use new features: WRC, CSP, CmBN, SAT, Mish activation, Mosaic data augmentation, CmBN, DropBlock regularization, and CIoU loss, and combine some of them to achieve state-of-the-art results: 43.5% AP (65.7% AP50) for the MS COCO dataset at a realtime speed of ~65 FPS on Tesla V100....","internal_url":"https://www.academia.edu/72590795/YOLOv4_Optimal_Speed_and_Accuracy_of_Object_Detection","translated_internal_url":"","created_at":"2022-02-28T04:48:45.236-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":8971336,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":81459848,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459848/thumbnails/1.jpg","file_name":"2004.10934v1.pdf","download_url":"https://www.academia.edu/attachments/81459848/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"YOLOv4_Optimal_Speed_and_Accuracy_of_Obj.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459848/2004.10934v1-libre.pdf?1646053088=\u0026response-content-disposition=attachment%3B+filename%3DYOLOv4_Optimal_Speed_and_Accuracy_of_Obj.pdf\u0026Expires=1732465985\u0026Signature=fh79hBxy0oo-xy0LMcnZSajCjVLDrZuzDsysJZJKnnc~4fUcCevZIRNXNKdW1U5j8rgJUdFAvOrXl6EthTfNMmrjiikdQGHOPeiRCloUSrx2Nfaukj~lmXbnttxs8GkRgi8JgoYhTKoG88narw20ehgtbTfQJXjzPqQfqssmhrHO4uRgxobwyNOLZyldfEXeGvF99xJsaX53CEisakyx3AqGD0L6-yTn2xwQe3T90Uoi6YDRVEzba7K71IzyEImFekEglx8fhVKvlYX5iIek7E5d7ztF3ZFotGj7flOYWUB3UhK51y-x8nTi9LBIaoL9FcXqePvg8yyYijAZY~i-NQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"YOLOv4_Optimal_Speed_and_Accuracy_of_Object_Detection","translated_slug":"","page_count":17,"language":"en","content_type":"Work","owner":{"id":8971336,"first_name":"Alexey","middle_initials":"","last_name":"Bochkovskiy","page_name":"alexeyab84","domain_name":"independent","created_at":"2014-02-08T19:34:11.371-08:00","display_name":"Alexey Bochkovskiy","url":"https://independent.academia.edu/alexeyab84"},"attachments":[{"id":81459848,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/81459848/thumbnails/1.jpg","file_name":"2004.10934v1.pdf","download_url":"https://www.academia.edu/attachments/81459848/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"YOLOv4_Optimal_Speed_and_Accuracy_of_Obj.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/81459848/2004.10934v1-libre.pdf?1646053088=\u0026response-content-disposition=attachment%3B+filename%3DYOLOv4_Optimal_Speed_and_Accuracy_of_Obj.pdf\u0026Expires=1732465985\u0026Signature=fh79hBxy0oo-xy0LMcnZSajCjVLDrZuzDsysJZJKnnc~4fUcCevZIRNXNKdW1U5j8rgJUdFAvOrXl6EthTfNMmrjiikdQGHOPeiRCloUSrx2Nfaukj~lmXbnttxs8GkRgi8JgoYhTKoG88narw20ehgtbTfQJXjzPqQfqssmhrHO4uRgxobwyNOLZyldfEXeGvF99xJsaX53CEisakyx3AqGD0L6-yTn2xwQe3T90Uoi6YDRVEzba7K71IzyEImFekEglx8fhVKvlYX5iIek7E5d7ztF3ZFotGj7flOYWUB3UhK51y-x8nTi9LBIaoL9FcXqePvg8yyYijAZY~i-NQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[],"urls":[{"id":18076264,"url":"https://arxiv.org/abs/2004.10934"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="62764217"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" 
data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/62764217/Scaled_YOLOv4_Scaling_Cross_Stage_Partial_Network"><img alt="Research paper thumbnail of Scaled-YOLOv4: Scaling Cross Stage Partial Network" class="work-thumbnail" src="https://attachments.academia-assets.com/75430154/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/62764217/Scaled_YOLOv4_Scaling_Cross_Stage_Partial_Network">Scaled-YOLOv4: Scaling Cross Stage Partial Network</a></div><div class="wp-workCard_item"><span>2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a1403a26a0367d75361ee92707e63663" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":75430154,"asset_id":62764217,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/75430154/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="62764217"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="62764217"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 62764217; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=62764217]").text(description); $(".js-view-count[data-work-id=62764217]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 62764217; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='62764217']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 62764217, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, 
downloadLinkId: "a1403a26a0367d75361ee92707e63663" } } $('.js-work-strip[data-work-id=62764217]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":62764217,"title":"Scaled-YOLOv4: Scaling Cross Stage Partial Network","translated_title":"","metadata":{"publisher":"IEEE","publication_name":"2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)"},"translated_abstract":null,"internal_url":"https://www.academia.edu/62764217/Scaled_YOLOv4_Scaling_Cross_Stage_Partial_Network","translated_internal_url":"","created_at":"2021-11-30T15:39:18.515-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":8971336,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":75430154,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/75430154/thumbnails/1.jpg","file_name":"2011.08036v1.pdf","download_url":"https://www.academia.edu/attachments/75430154/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Scaled_YOLOv4_Scaling_Cross_Stage_Partia.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/75430154/2011.08036v1-libre.pdf?1638421068=\u0026response-content-disposition=attachment%3B+filename%3DScaled_YOLOv4_Scaling_Cross_Stage_Partia.pdf\u0026Expires=1732465985\u0026Signature=ZyoTxeEBl4qO-6ScfiC361PTTG8NcOCgGf2-MGHx0hoPFA4gLDEj1rJY7QoAQIFgmt7xyiRdPK8wH0KOyPCk2ilALdsqhN-3kntIQ1oz6VqTQ~lbJt~-mHoirqnJ7fj-sJi7d83x87ALjVutVtE4f6YJ2II48fPFnC0ygLOjADphZh0sUhX9uxYalnMkvaJD6Yncg6Az0aaxJrWSPSqVRRfyKqrYvvcN2FEaJH6IjJFCcu9KILy4Em16h-X0HM65OyORXSEcS5jPveI-CR1VdIbpYYof8YpBXxxTkch~YnD17rDPGq1s9on9iKVlfQEUD2yHpjbY9YiTN8cDvfL~0Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Scaled_YOLOv4_Scaling_Cross_Stage_Partial_Network","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":8971336,"first_name":"Alexey","middle_initials":"","last_name":"Bochkovskiy","page_name":"alexeyab84","domain_name":"independent","created_at":"2014-02-08T19:34:11.371-08:00","display_name":"Alexey Bochkovskiy","url":"https://independent.academia.edu/alexeyab84"},"attachments":[{"id":75430154,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/75430154/thumbnails/1.jpg","file_name":"2011.08036v1.pdf","download_url":"https://www.academia.edu/attachments/75430154/download_file?st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&st=MTczMjQ2MjM4NSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Scaled_YOLOv4_Scaling_Cross_Stage_Partia.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/75430154/2011.08036v1-libre.pdf?1638421068=\u0026response-content-disposition=attachment%3B+filename%3DScaled_YOLOv4_Scaling_Cross_Stage_Partia.pdf\u0026Expires=1732465985\u0026Signature=ZyoTxeEBl4qO-6ScfiC361PTTG8NcOCgGf2-MGHx0hoPFA4gLDEj1rJY7QoAQIFgmt7xyiRdPK8wH0KOyPCk2ilALdsqhN-3kntIQ1oz6VqTQ~lbJt~-mHoirqnJ7fj-sJi7d83x87ALjVutVtE4f6YJ2II48fPFnC0ygLOjADphZh0sUhX9uxYalnMkvaJD6Yncg6Az0aaxJrWSPSqVRRfyKqrYvvcN2FEaJH6IjJFCcu9KILy4Em16h-X0HM65OyORXSEcS5jPveI-CR1VdIbpYYof8YpBXxxTkch~YnD17rDPGq1s9on9iKVlfQEUD2yHpjbY9YiTN8cDvfL~0Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[],"urls":[{"id":14620229,"url":"http://xplorestaging.ieee.org/ielx7/9577055/9577056/09577489.pdf?arnumber=9577489"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); 
href="https://www.academia.edu/Documents/in/Earth_Sciences">Earth Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></li></ul></div></div><div class="DesignSystem" id="credit" style="width:100%;"><ul class="u-pl0x footer-links-legal list-inline"><li><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li>Academia ©2024</li></ul></div><script> //<![CDATA[ window.detect_gmtoffset = true; window.Academia && window.Academia.set_gmtoffset && Academia.set_gmtoffset('/gmtoffset'); //]]> </script> <div id='overlay_background'></div> <div id='bootstrap-modal-container' class='bootstrap'></div> <div id='ds-modal-container' class='bootstrap DesignSystem'></div> <div id='full-screen-modal'></div> </div> </body> </html>