<!-- NOTE(review): removed stray pre-doctype text ("CINXE.COM" plus a duplicated page title), a scrape/save artifact; raw text before <!doctype html> is invalid HTML and can trigger quirks-mode handling -->
<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"> <script> const GTAG_ENABLED = true ; const GTAG_TRACKING_ID = "UA-121182717-1"; const SENTRY_DSN_FRONTEND = "".trim(); const GLOBAL_CSRF_TOKEN = 'z6QGX3HsqCxwqd7FageiboQCXB5Ifbx4VKmpJz0SRbpLhWLaHLYbJPBzI3VCDlyK'; const MEDIA_URL = "https://production-media.paperswithcode.com/"; const ASSETS_URL = "https://production-assets.paperswithcode.com"; run_after_frontend_loaded = window.run_after_frontend_loaded || []; </script> <link rel="preconnect" href="https://production-assets.paperswithcode.com"><link rel="dns-prefetch" href="https://production-assets.paperswithcode.com"><link rel="preload" as="font" type="font/woff2" href="https://production-assets.paperswithcode.com/perf/fonts/65e877e527022735c1a1.woff2" crossorigin><link rel="preload" as="font" type="font/woff2" href="https://production-assets.paperswithcode.com/perf/fonts/917632e36982ca7933c8.woff2" crossorigin><link rel="preload" as="font" type="font/woff2" href="https://production-assets.paperswithcode.com/perf/fonts/f1405bd8a987c2ea8a67.woff2" crossorigin><script>(()=>{if(GTAG_ENABLED){const t=document.createElement("script");function n(){window.dataLayer.push(arguments)}t.src=`https://www.googletagmanager.com/gtag/js?id=${GTAG_TRACKING_ID}`,document.head.appendChild(t),window.dataLayer=window.dataLayer||[],window.gtag=n,n("js",new Date),n("config",GTAG_TRACKING_ID),window.captureOutboundLink=function(t){n("event","click",{event_category:"outbound",event_label:t})}}else window.captureOutboundLink=function(n){document.location=n}})();</script><link rel="preload" as="script" href="https://production-assets.paperswithcode.com/perf/766.4af6b88b.js"><link rel="preload" as="script" href="https://production-assets.paperswithcode.com/perf/2.6da00df7.js"><link rel="preload" as="script" href="https://production-assets.paperswithcode.com/perf/351.a22a9607.js"><link 
rel="preload" as="script" href="https://production-assets.paperswithcode.com/perf/452.d3ecdfa4.js"><link rel="preload" as="style" href="https://production-assets.paperswithcode.com/perf/553.4050647d.css"><link rel="preload" as="script" href="https://production-assets.paperswithcode.com/perf/553.357efc0e.js"><link rel="preload" as="style" href="https://production-assets.paperswithcode.com/perf/918.c41196c3.css"><link rel="preload" as="style" href="https://production-assets.paperswithcode.com/perf/sota.table.fe0fcc15.css"><link rel="preload" as="script" href="https://production-assets.paperswithcode.com/perf/sota.table.040f2c99.js"><link rel="stylesheet" href="https://production-assets.paperswithcode.com/perf/553.4050647d.css"><link rel="stylesheet" href="https://production-assets.paperswithcode.com/perf/918.c41196c3.css"><link rel="stylesheet" href="https://production-assets.paperswithcode.com/perf/sota.table.fe0fcc15.css"> <!-- Metadata --> <title>GOT-10k Benchmark (Visual Object Tracking) | Papers With Code</title> <meta name="description" content="The current state-of-the-art on GOT-10k is SAMURAI-L. See a full comparison of 36 papers with code." /> <!-- Open Graph protocol metadata --> <meta property="og:title" content="Papers with Code - GOT-10k Benchmark (Visual Object Tracking)"> <meta property="og:description" content="The current state-of-the-art on GOT-10k is SAMURAI-L. 
See a full comparison of 36 papers with code."> <meta property="og:image" content="https://production-media.paperswithcode.com/sota-thumbs/visual-object-tracking-on-got-10k-large_131118d1.png"> <meta property="og:url" content="https://paperswithcode.com/sota/visual-object-tracking-on-got-10k"> <!-- Twitter metadata --> <meta name="twitter:card" content="summary_large_image"> <meta name="twitter:site" content="@paperswithcode"> <meta name="twitter:title" content="Papers with Code - GOT-10k Benchmark (Visual Object Tracking)"> <meta name="twitter:description" content="The current state-of-the-art on GOT-10k is SAMURAI-L. See a full comparison of 36 papers with code."> <meta name="twitter:creator" content="@paperswithcode"> <meta name="twitter:url" content="https://paperswithcode.com/sota/visual-object-tracking-on-got-10k"> <meta name="twitter:domain" content="paperswithcode.com"> <!-- JSON LD --> <script type="application/ld+json">{ "@context": "http://schema.org", "@graph": { "@type": "ItemList", "name": "GOT-10k Benchmark (Visual Object Tracking)", "description": "The current state-of-the-art on GOT-10k is SAMURAI-L. 
See a full comparison of 36 papers with code.", "url": "https://paperswithcode.com/sota/visual-object-tracking-on-got-10k", "image": "https://production-media.paperswithcode.com/sota-thumbs/visual-object-tracking-on-got-10k-large_131118d1.png" } }</script> <meta name="theme-color" content="#fff"/> <link rel="manifest" href="https://production-assets.paperswithcode.com/static/manifest.web.json"> </head> <body> <nav class="navbar navbar-expand-lg navbar-light header"> <a class="navbar-brand" href="/"> <span class=" icon-wrapper" data-name="pwc"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path d="M88 128h48v256H88zm144 0h48v256h-48zm-72 16h48v224h-48zm144 0h48v224h-48zm72-16h48v256h-48z"/><path d="M104 104V56H16v400h88v-48H64V104zm304-48v48h40v304h-40v48h88V56z"/></svg></span> </a> <div class="navbar-mobile-twitter d-lg-none"> <a rel="noreferrer" href="https://twitter.com/paperswithcode"> <span class=" icon-wrapper icon-fa icon-fa-brands" data-name="twitter"><svg viewBox="0 0 512.001 515.25" xmlns="http://www.w3.org/2000/svg"><path d="M459.37 152.016c.326 4.548.326 9.097.326 13.645 0 138.72-105.583 298.558-298.559 298.558C101.685 464.22 46.457 447 0 417.114c8.447.973 16.568 1.298 25.34 1.298 49.054 0 94.213-16.568 130.274-44.832-46.132-.975-84.792-31.188-98.113-72.772 6.499.975 12.996 1.624 19.819 1.624 9.42 0 18.843-1.3 27.613-3.573-48.08-9.747-84.142-51.98-84.142-102.984v-1.3c13.968 7.798 30.213 12.67 47.43 13.32-28.263-18.843-46.78-51.006-46.78-87.391 0-19.492 5.196-37.36 14.294-52.954 51.654 63.674 129.3 105.258 216.364 109.807-1.624-7.797-2.599-15.918-2.599-24.04 0-57.827 46.782-104.934 104.934-104.934 30.214 0 57.502 12.67 76.671 33.136 23.715-4.548 46.455-13.319 66.599-25.34-7.798 24.367-24.366 44.834-46.132 57.828 21.117-2.274 41.584-8.122 60.426-16.244-14.292 20.791-32.161 39.309-52.628 54.253z"/></svg></span> </a> </div> <button class="navbar-toggler" type="button" data-toggle="collapse" data-bs-toggle="collapse" data-target="#top-menu" 
data-bs-target="#top-menu" aria-controls="top-menu" aria-expanded="false" aria-label="Toggle navigation" > <span class="navbar-toggler-icon"></span> </button> <div class="collapse navbar-collapse" id="top-menu"> <ul class="navbar-nav mr-auto navbar-nav__left light-header"> <li class="nav-item header-search"> <form action="/search" method="get" id="id_global_search_form" autocomplete="off"> <input type="text" name="q_meta" style="display:none" id="q_meta" /> <input type="hidden" name="q_type" id="q_type" /> <input id="id_global_search_input" autocomplete="off" value="" name='q' class="global-search" type="search" placeholder='Search'/> <button type="submit" class="icon"><span class=" icon-wrapper icon-fa icon-fa-light" data-name="search"><svg viewBox="0 0 512.025 520.146" xmlns="http://www.w3.org/2000/svg"><path d="M508.5 482.6c4.7 4.7 4.7 12.3 0 17l-9.9 9.9c-4.7 4.7-12.3 4.7-17 0l-129-129c-2.2-2.3-3.5-5.3-3.5-8.5v-10.2C312 396 262.5 417 208 417 93.1 417 0 323.9 0 209S93.1 1 208 1s208 93.1 208 208c0 54.5-21 104-55.3 141.1H371c3.2 0 6.2 1.2 8.5 3.5zM208 385c97.3 0 176-78.7 176-176S305.3 33 208 33 32 111.7 32 209s78.7 176 176 176z"/></svg></span></button> </form> </li> <li class="nav-item"> <a class="nav-link" href="/sota"> Browse State-of-the-Art </a> </li> <li class="nav-item"> <a class="nav-link" href="/datasets"> Datasets </a> </li> <li class="nav-item"> <a class="nav-link" href="/methods">Methods</a> </li> <li class="nav-item dropdown"> <a class="nav-link dropdown-toggle" role="button" id="navbarDropdownRepro" data-toggle="dropdown" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false" > More </a> <div class="dropdown-menu" aria-labelledby="navbarDropdownRepro"> <a class="dropdown-item" href="/newsletter">Newsletter</a> <a class="dropdown-item" href="/rc2022">RC2022</a> <div class="dropdown-divider"></div> <a class="dropdown-item" href="/about">About</a> <a class="dropdown-item" href="/trends">Trends</a> <a class="dropdown-item" 
href="https://portal.paperswithcode.com/"> Portals </a> <a class="dropdown-item" href="/libraries"> Libraries </a> </div> </li> </ul> <ul class="navbar-nav ml-auto navbar-nav__right navbar-subscribe justify-content-center align-items-center"> <li class="nav-item"> <a class="nav-link" rel="noreferrer" href="https://twitter.com/paperswithcode"> <span class="nav-link-social-icon icon-wrapper icon-fa icon-fa-brands" data-name="twitter"><svg viewBox="0 0 512.001 515.25" xmlns="http://www.w3.org/2000/svg"><path d="M459.37 152.016c.326 4.548.326 9.097.326 13.645 0 138.72-105.583 298.558-298.559 298.558C101.685 464.22 46.457 447 0 417.114c8.447.973 16.568 1.298 25.34 1.298 49.054 0 94.213-16.568 130.274-44.832-46.132-.975-84.792-31.188-98.113-72.772 6.499.975 12.996 1.624 19.819 1.624 9.42 0 18.843-1.3 27.613-3.573-48.08-9.747-84.142-51.98-84.142-102.984v-1.3c13.968 7.798 30.213 12.67 47.43 13.32-28.263-18.843-46.78-51.006-46.78-87.391 0-19.492 5.196-37.36 14.294-52.954 51.654 63.674 129.3 105.258 216.364 109.807-1.624-7.797-2.599-15.918-2.599-24.04 0-57.827 46.782-104.934 104.934-104.934 30.214 0 57.502 12.67 76.671 33.136 23.715-4.548 46.455-13.319 66.599-25.34-7.798 24.367-24.366 44.834-46.132 57.828 21.117-2.274 41.584-8.122 60.426-16.244-14.292 20.791-32.161 39.309-52.628 54.253z"/></svg></span> </a> </li> <li class="nav-item"> <a id="signin-link" class="nav-link" href="/accounts/login?next=/sota/visual-object-tracking-on-got-10k">Sign In</a> </li> </ul> </div> </nav> <!-- Page modals --> <div class="modal fade" id="emailModal" tabindex="-1" role="dialog" aria-labelledby="emailModalLabel" aria-hidden="true"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <h3 class="modal-title" id="emailModalLabel">Subscribe to the PwC Newsletter</h3> <button type="button" class="close" data-dismiss="modal" data-bs-dismiss="modal" aria-label="Close"> <span aria-hidden="true">×</span> </button> </div> <form action="" method="post"> 
<div class="modal-body"> <div class="modal-body-info-text"> Stay informed on the latest trending ML papers with code, research developments, libraries, methods, and datasets.<br/><br/> <a href="/newsletter">Read previous issues</a> </div> <input type="hidden" name="csrfmiddlewaretoken" value="z6QGX3HsqCxwqd7FageiboQCXB5Ifbx4VKmpJz0SRbpLhWLaHLYbJPBzI3VCDlyK"> <input placeholder="Enter your email" type="email" class="form-control pwc-email" name="address" id="id_address" max_length="100" required> </div> <div class="modal-footer"> <button type="submit" class="btn btn-primary">Subscribe</button> </div> </form> </div> </div> </div> <!-- Login --> <div class="modal fade" id="loginModal" tabindex="-1" role="dialog" aria-labelledby="loginModalLabel" aria-hidden="true"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <h5 class="modal-title" id="loginModalLabel">Join the community</h5> <button type="button" class="close btn-close" data-dismiss="modal" data-bs-dismiss="modal" aria-label="Close"> <span aria-hidden="true">×</span> </button> </div> <div class="login-modal-message"> You need to <a href="/accounts/login?next=/sota/visual-object-tracking-on-got-10k">log in</a> to edit.<br/> You can <a href="/accounts/register?next=/sota/visual-object-tracking-on-got-10k">create a new account</a> if you don't have one.<br/><br/> </div> </div> </div> </div> <div class="container content content-buffer "> <div class="leaderboard-header"> <a href="/task/visual-object-tracking"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Visual Object Tracking</span> </span> </a> </div> <div id="sota-page"> <div class="text-center"> <img src="https://production-assets.paperswithcode.com/perf/images/spin-1s-32px-ed14c515.gif"> </div> </div> <link href="https://production-assets.paperswithcode.com/static/fonts/font-awesome/css/all.min.css" rel="stylesheet" /> <script 
type="application/javascript"> const CSRF_TOKEN = "z6QGX3HsqCxwqd7FageiboQCXB5Ifbx4VKmpJz0SRbpLhWLaHLYbJPBzI3VCDlyK"; const USER_IS_AUTHENTICATED = false; const LOGIN_REQUIRED = true; </script> <script type="module" src="https://unpkg.com/ionicons@5.1.2/dist/ionicons/ionicons.esm.js" ></script> <script nomodule="" src="https://unpkg.com/ionicons@5.1.2/dist/ionicons/ionicons.js" ></script> <!-- Start SOTA Table Generation --> <script id="evaluation-chart-data" type="application/json">{"all": {"yAxis": {"title": "Average Overlap", "includeZero": false, "gridColor": "#ddd", "valueFormatString": "", "minimum": 58.93, "maximum": 83.77000000000001}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": [{"x": "2018-11-19", "y": 61.0, "name": "ATOM", "nameShort": "ATOM", "nameDetails": null, "paperSlug": "atom-accurate-tracking-by-overlap", "usesAdditionalData": false}, {"x": "2019-04-15", "y": 61.1, "name": "DiMP", "nameShort": "DiMP", "nameDetails": null, "paperSlug": "190407220", "usesAdditionalData": false}, {"x": "2019-11-28", "y": 64.9, "name": "Siam R-CNN", "nameShort": "Siam R-CNN", "nameDetails": null, "paperSlug": "siam-r-cnn-visual-tracking-by-re-detection", "usesAdditionalData": false}, {"x": "2021-03-31", "y": 68.8, "name": "STARK", "nameShort": "STARK", "nameDetails": null, "paperSlug": "learning-spatio-temporal-transformer-for", "usesAdditionalData": false}, {"x": "2021-12-02", "y": 69.4, "name": "SwinTrack-B", "nameShort": "SwinTrack-B", "nameDetails": null, "paperSlug": "swintrack-a-simple-and-strong-baseline-for", "usesAdditionalData": false}, {"x": "2022-03-21", "y": 75.6, "name": "MixFormer-L", "nameShort": "MixFormer-L", "nameDetails": null, "paperSlug": "mixformer-end-to-end-tracking-with-iterative-1", "usesAdditionalData": false}, {"x": "2022-11-12", "y": 75.7, "name": "NeighborTrack-OSTrack", "nameShort": 
"NeighborTrack-OSTrack", "nameDetails": null, "paperSlug": "neighbortrack-improving-single-object", "usesAdditionalData": false}, {"x": "2023-01-01", "y": 78.5, "name": "ARTrack-L", "nameShort": "ARTrack-L", "nameDetails": null, "paperSlug": "autoregressive-visual-tracking", "usesAdditionalData": false}, {"x": "2023-08-25", "y": 80.4, "name": "MITS", "nameShort": "MITS", "nameDetails": null, "paperSlug": "integrating-boxes-and-masks-a-multi-object", "usesAdditionalData": false}, {"x": "2024-11-18", "y": 81.7, "name": "SAMURAI-L", "nameShort": "SAMURAI-L", "nameDetails": null, "paperSlug": "samurai-adapting-segment-anything-model-for-1", "usesAdditionalData": false}]}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": [{"x": "2019-11-14", "y": 61.0, "name": "SiamFC++", "nameShort": "SiamFC++", "nameDetails": null, "paperSlug": "siamfc-towards-robust-and-accurate-visual", "usesAdditionalData": false}, {"x": "2020-06-18", "y": 61.1, "name": "Ocean", "nameShort": "Ocean", "nameDetails": null, "paperSlug": "ocean-object-aware-anchor-free-tracking", "usesAdditionalData": false}, {"x": "2020-07-08", "y": 61.7, "name": "TRASFUST", "nameShort": "TRASFUST", "nameDetails": null, "paperSlug": "a-distilled-model-for-tracking-and-tracker", "usesAdditionalData": false}, {"x": "2021-04-01", "y": 66.8, "name": "TREG", "nameShort": "TREG", "nameDetails": null, "paperSlug": "target-transformed-regression-for-accurate", "usesAdditionalData": false}, {"x": "2021-04-01", "y": 64.2, "name": "STMTrack", "nameShort": "STMTrack", "nameDetails": null, "paperSlug": "stmtrack-template-free-visual-tracking-with", "usesAdditionalData": false}, {"x": "2021-12-15", "y": 64.5, "name": "FEAR-L", "nameShort": "FEAR-L", "nameDetails": null, "paperSlug": "fear-fast-efficient-accurate-and-robust", "usesAdditionalData": false}, {"x": "2021-12-15", "y": 62.3, "name": "FEAR-M", "nameShort": "FEAR-M", 
"nameDetails": null, "paperSlug": "fear-fast-efficient-accurate-and-robust", "usesAdditionalData": false}, {"x": "2021-12-15", "y": 61.9, "name": "FEAR-XS", "nameShort": "FEAR-XS", "nameDetails": null, "paperSlug": "fear-fast-efficient-accurate-and-robust", "usesAdditionalData": false}, {"x": "2022-03-21", "y": 71.2, "name": "MixFormer-1k", "nameShort": "MixFormer-1k", "nameDetails": null, "paperSlug": "mixformer-end-to-end-tracking-with-iterative-1", "usesAdditionalData": false}, {"x": "2022-03-21", "y": 70.7, "name": "MixFormer", "nameShort": "MixFormer", "nameDetails": null, "paperSlug": "mixformer-end-to-end-tracking-with-iterative-1", "usesAdditionalData": false}, {"x": "2022-03-22", "y": 73.7, "name": "OSTrack-384", "nameShort": "OSTrack-384", "nameDetails": null, "paperSlug": "joint-feature-learning-and-relation-modeling", "usesAdditionalData": false}, {"x": "2022-05-26", "y": 72.9, "name": "SwinV2-L 1K-MIM", "nameShort": "SwinV2-L 1K-MIM", "nameDetails": null, "paperSlug": "revealing-the-dark-secrets-of-masked-image", "usesAdditionalData": false}, {"x": "2022-05-26", "y": 70.8, "name": "SwinV2-B 1K-MIM", "nameShort": "SwinV2-B 1K-MIM", "nameDetails": null, "paperSlug": "revealing-the-dark-secrets-of-masked-image", "usesAdditionalData": false}, {"x": "2022-07-20", "y": 69.6, "name": "AiATrack", "nameShort": "AiATrack", "nameDetails": null, "paperSlug": "aiatrack-attention-in-attention-for", "usesAdditionalData": false}, {"x": "2022-08-11", "y": 67.5, "name": "SLT-TransT", "nameShort": "SLT-TransT", "nameDetails": null, "paperSlug": "towards-sequence-level-training-for-visual", "usesAdditionalData": false}, {"x": "2023-02-06", "y": 75.7, "name": "MixViT-L", "nameShort": "MixViT-L", "nameDetails": "ConvMAE", "paperSlug": "mixformer-end-to-end-tracking-with-iterative-2", "usesAdditionalData": false}, {"x": "2023-02-27", "y": 76.6, "name": "TATrack-L-GOT", "nameShort": "TATrack-L-GOT", "nameDetails": null, "paperSlug": 
"target-aware-tracking-with-long-term-context", "usesAdditionalData": false}, {"x": "2023-04-02", "y": 75.9, "name": "DropMAE", "nameShort": "DropMAE", "nameDetails": null, "paperSlug": "dropmae-masked-autoencoders-with-spatial", "usesAdditionalData": false}, {"x": "2023-04-27", "y": 74.8, "name": "SeqTrack-L384", "nameShort": "SeqTrack-L384", "nameDetails": null, "paperSlug": "seqtrack-sequence-to-sequence-learning-for", "usesAdditionalData": false}, {"x": "2023-11-03", "y": 77.4, "name": "HIPTrack", "nameShort": "HIPTrack", "nameDetails": null, "paperSlug": "learning-historical-status-prompt-for", "usesAdditionalData": false}, {"x": "2023-12-28", "y": 79.5, "name": "ARTrackV2-L", "nameShort": "ARTrackV2-L", "nameDetails": null, "paperSlug": "artrackv2-prompting-autoregressive-tracker", "usesAdditionalData": false}, {"x": "2024-01-03", "y": 78.2, "name": "ODTrack-L", "nameShort": "ODTrack-L", "nameDetails": null, "paperSlug": "odtrack-online-dense-temporal-token-learning", "usesAdditionalData": false}, {"x": "2024-01-03", "y": 77.0, "name": "ODTrack-B", "nameShort": "ODTrack-B", "nameDetails": null, "paperSlug": "odtrack-online-dense-temporal-token-learning", "usesAdditionalData": false}, {"x": "2024-03-08", "y": 78.9, "name": "LoRAT-g-378", "nameShort": "LoRAT-g-378", "nameDetails": null, "paperSlug": "tracking-meets-lora-faster-training-larger", "usesAdditionalData": false}, {"x": "2024-03-08", "y": 77.5, "name": "LoRAT-L-378", "nameShort": "LoRAT-L-378", "nameDetails": null, "paperSlug": "tracking-meets-lora-faster-training-larger", "usesAdditionalData": false}, {"x": "2024-03-28", "y": 77.9, "name": "RTracker-L", "nameShort": "RTracker-L", "nameDetails": null, "paperSlug": "rtracker-recoverable-tracking-via-pn-tree", "usesAdditionalData": false}]}}}, "uses_additional_data": {"yAxis": {"title": "Average Overlap", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", 
"showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}, "no_additional_data": {"yAxis": {"title": "Average Overlap", "includeZero": false, "gridColor": "#ddd", "valueFormatString": "", "minimum": 58.93, "maximum": 83.77000000000001}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": [{"x": "2018-11-19", "y": 61.0, "name": "ATOM", "nameShort": "ATOM", "nameDetails": null, "paperSlug": "atom-accurate-tracking-by-overlap", "usesAdditionalData": false}, {"x": "2019-04-15", "y": 61.1, "name": "DiMP", "nameShort": "DiMP", "nameDetails": null, "paperSlug": "190407220", "usesAdditionalData": false}, {"x": "2019-11-28", "y": 64.9, "name": "Siam R-CNN", "nameShort": "Siam R-CNN", "nameDetails": null, "paperSlug": "siam-r-cnn-visual-tracking-by-re-detection", "usesAdditionalData": false}, {"x": "2021-03-31", "y": 68.8, "name": "STARK", "nameShort": "STARK", "nameDetails": null, "paperSlug": "learning-spatio-temporal-transformer-for", "usesAdditionalData": false}, {"x": "2021-12-02", "y": 69.4, "name": "SwinTrack-B", "nameShort": "SwinTrack-B", "nameDetails": null, "paperSlug": "swintrack-a-simple-and-strong-baseline-for", "usesAdditionalData": false}, {"x": "2022-03-21", "y": 75.6, "name": "MixFormer-L", "nameShort": "MixFormer-L", "nameDetails": null, "paperSlug": "mixformer-end-to-end-tracking-with-iterative-1", "usesAdditionalData": false}, {"x": "2022-11-12", "y": 75.7, "name": "NeighborTrack-OSTrack", "nameShort": "NeighborTrack-OSTrack", "nameDetails": null, "paperSlug": "neighbortrack-improving-single-object", "usesAdditionalData": false}, {"x": "2023-01-01", "y": 78.5, "name": "ARTrack-L", "nameShort": "ARTrack-L", 
"nameDetails": null, "paperSlug": "autoregressive-visual-tracking", "usesAdditionalData": false}, {"x": "2023-08-25", "y": 80.4, "name": "MITS", "nameShort": "MITS", "nameDetails": null, "paperSlug": "integrating-boxes-and-masks-a-multi-object", "usesAdditionalData": false}, {"x": "2024-11-18", "y": 81.7, "name": "SAMURAI-L", "nameShort": "SAMURAI-L", "nameDetails": null, "paperSlug": "samurai-adapting-segment-anything-model-for-1", "usesAdditionalData": false}]}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": [{"x": "2019-11-14", "y": 61.0, "name": "SiamFC++", "nameShort": "SiamFC++", "nameDetails": null, "paperSlug": "siamfc-towards-robust-and-accurate-visual", "usesAdditionalData": false}, {"x": "2020-06-18", "y": 61.1, "name": "Ocean", "nameShort": "Ocean", "nameDetails": null, "paperSlug": "ocean-object-aware-anchor-free-tracking", "usesAdditionalData": false}, {"x": "2020-07-08", "y": 61.7, "name": "TRASFUST", "nameShort": "TRASFUST", "nameDetails": null, "paperSlug": "a-distilled-model-for-tracking-and-tracker", "usesAdditionalData": false}, {"x": "2021-04-01", "y": 66.8, "name": "TREG", "nameShort": "TREG", "nameDetails": null, "paperSlug": "target-transformed-regression-for-accurate", "usesAdditionalData": false}, {"x": "2021-04-01", "y": 64.2, "name": "STMTrack", "nameShort": "STMTrack", "nameDetails": null, "paperSlug": "stmtrack-template-free-visual-tracking-with", "usesAdditionalData": false}, {"x": "2021-12-15", "y": 64.5, "name": "FEAR-L", "nameShort": "FEAR-L", "nameDetails": null, "paperSlug": "fear-fast-efficient-accurate-and-robust", "usesAdditionalData": false}, {"x": "2021-12-15", "y": 62.3, "name": "FEAR-M", "nameShort": "FEAR-M", "nameDetails": null, "paperSlug": "fear-fast-efficient-accurate-and-robust", "usesAdditionalData": false}, {"x": "2021-12-15", "y": 61.9, "name": "FEAR-XS", "nameShort": "FEAR-XS", "nameDetails": null, 
"paperSlug": "fear-fast-efficient-accurate-and-robust", "usesAdditionalData": false}, {"x": "2022-03-21", "y": 71.2, "name": "MixFormer-1k", "nameShort": "MixFormer-1k", "nameDetails": null, "paperSlug": "mixformer-end-to-end-tracking-with-iterative-1", "usesAdditionalData": false}, {"x": "2022-03-21", "y": 70.7, "name": "MixFormer", "nameShort": "MixFormer", "nameDetails": null, "paperSlug": "mixformer-end-to-end-tracking-with-iterative-1", "usesAdditionalData": false}, {"x": "2022-03-22", "y": 73.7, "name": "OSTrack-384", "nameShort": "OSTrack-384", "nameDetails": null, "paperSlug": "joint-feature-learning-and-relation-modeling", "usesAdditionalData": false}, {"x": "2022-05-26", "y": 72.9, "name": "SwinV2-L 1K-MIM", "nameShort": "SwinV2-L 1K-MIM", "nameDetails": null, "paperSlug": "revealing-the-dark-secrets-of-masked-image", "usesAdditionalData": false}, {"x": "2022-05-26", "y": 70.8, "name": "SwinV2-B 1K-MIM", "nameShort": "SwinV2-B 1K-MIM", "nameDetails": null, "paperSlug": "revealing-the-dark-secrets-of-masked-image", "usesAdditionalData": false}, {"x": "2022-07-20", "y": 69.6, "name": "AiATrack", "nameShort": "AiATrack", "nameDetails": null, "paperSlug": "aiatrack-attention-in-attention-for", "usesAdditionalData": false}, {"x": "2022-08-11", "y": 67.5, "name": "SLT-TransT", "nameShort": "SLT-TransT", "nameDetails": null, "paperSlug": "towards-sequence-level-training-for-visual", "usesAdditionalData": false}, {"x": "2023-02-06", "y": 75.7, "name": "MixViT-L", "nameShort": "MixViT-L", "nameDetails": "ConvMAE", "paperSlug": "mixformer-end-to-end-tracking-with-iterative-2", "usesAdditionalData": false}, {"x": "2023-02-27", "y": 76.6, "name": "TATrack-L-GOT", "nameShort": "TATrack-L-GOT", "nameDetails": null, "paperSlug": "target-aware-tracking-with-long-term-context", "usesAdditionalData": false}, {"x": "2023-04-02", "y": 75.9, "name": "DropMAE", "nameShort": "DropMAE", "nameDetails": null, "paperSlug": "dropmae-masked-autoencoders-with-spatial", 
"usesAdditionalData": false}, {"x": "2023-04-27", "y": 74.8, "name": "SeqTrack-L384", "nameShort": "SeqTrack-L384", "nameDetails": null, "paperSlug": "seqtrack-sequence-to-sequence-learning-for", "usesAdditionalData": false}, {"x": "2023-11-03", "y": 77.4, "name": "HIPTrack", "nameShort": "HIPTrack", "nameDetails": null, "paperSlug": "learning-historical-status-prompt-for", "usesAdditionalData": false}, {"x": "2023-12-28", "y": 79.5, "name": "ARTrackV2-L", "nameShort": "ARTrackV2-L", "nameDetails": null, "paperSlug": "artrackv2-prompting-autoregressive-tracker", "usesAdditionalData": false}, {"x": "2024-01-03", "y": 78.2, "name": "ODTrack-L", "nameShort": "ODTrack-L", "nameDetails": null, "paperSlug": "odtrack-online-dense-temporal-token-learning", "usesAdditionalData": false}, {"x": "2024-01-03", "y": 77.0, "name": "ODTrack-B", "nameShort": "ODTrack-B", "nameDetails": null, "paperSlug": "odtrack-online-dense-temporal-token-learning", "usesAdditionalData": false}, {"x": "2024-03-08", "y": 78.9, "name": "LoRAT-g-378", "nameShort": "LoRAT-g-378", "nameDetails": null, "paperSlug": "tracking-meets-lora-faster-training-larger", "usesAdditionalData": false}, {"x": "2024-03-08", "y": 77.5, "name": "LoRAT-L-378", "nameShort": "LoRAT-L-378", "nameDetails": null, "paperSlug": "tracking-meets-lora-faster-training-larger", "usesAdditionalData": false}, {"x": "2024-03-28", "y": 77.9, "name": "RTracker-L", "nameShort": "RTracker-L", "nameDetails": null, "paperSlug": "rtracker-recoverable-tracking-via-pn-tree", "usesAdditionalData": false}]}}}}</script> <script id="evaluation-table-metrics" type="application/json">[{"id": 5798, "name": "Average Overlap", "is_loss": false, "is_fixed": false}, {"id": 5799, "name": "Success Rate 0.5", "is_loss": false, "is_fixed": false}, {"id": 33891, "name": "Success Rate 0.75", "is_loss": false, "is_fixed": false}]</script> <script id="evaluation-table-data" type="application/json">[{"table_id": 3401, "row_id": 134206, "rank": 1, "method": 
"SAMURAI-L", "mlmodel": {}, "method_short": "SAMURAI-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-11-18", "metrics": {"Average Overlap": "81.7", "Success Rate 0.5": "92.2", "Success Rate 0.75": "76.9"}, "raw_metrics": {"Average Overlap": 81.7, "Success Rate 0.5": 92.2, "Success Rate 0.75": 76.9}, "uses_additional_data": false, "paper": {"id": 1569727, "title": "SAMURAI: Adapting Segment Anything Model for Zero-Shot Visual Tracking with Motion-Aware Memory", "url": "/paper/samurai-adapting-segment-anything-model-for-1", "published": "2024-11-18T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 125856, "rank": 2, "method": "MITS", "mlmodel": {}, "method_short": "MITS", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-08-25", "metrics": {"Average Overlap": "80.4", "Success Rate 0.5": "89.8", "Success Rate 0.75": "75.8"}, "raw_metrics": {"Average Overlap": 80.4, "Success Rate 0.5": 89.8, "Success Rate 0.75": 75.8}, "uses_additional_data": false, "paper": {"id": 1269714, "title": "Integrating Boxes and Masks: A Multi-Object Framework for Unified Visual Tracking and Segmentation", "url": "/paper/integrating-boxes-and-masks-a-multi-object", "published": "2023-08-25T00:00:00.000000", "code": true, "review_url": "/paper/integrating-boxes-and-masks-a-multi-object/review/?hl=125856"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 116443, "rank": 3, "method": "ARTrackV2-L", "mlmodel": {}, "method_short": "ARTrackV2-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-12-28", "metrics": {"Average Overlap": "79.5", "Success Rate 0.5": "87.8", "Success Rate 0.75": "79.6"}, "raw_metrics": {"Average Overlap": 79.5, "Success Rate 0.5": 87.8, "Success Rate 0.75": 79.6}, "uses_additional_data": false, "paper": 
{"id": 1351532, "title": "ARTrackV2: Prompting Autoregressive Tracker Where to Look and How to Describe", "url": "/paper/artrackv2-prompting-autoregressive-tracker", "published": "2023-12-28T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 126453, "rank": 4, "method": "LoRAT-g-378", "mlmodel": {}, "method_short": "LoRAT-g-378", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-03-08", "metrics": {"Average Overlap": "78.9", "Success Rate 0.5": "87.8", "Success Rate 0.75": "80.7"}, "raw_metrics": {"Average Overlap": 78.9, "Success Rate 0.5": 87.8, "Success Rate 0.75": 80.7}, "uses_additional_data": false, "paper": {"id": 1396051, "title": "Tracking Meets LoRA: Faster Training, Larger Model, Stronger Performance", "url": "/paper/tracking-meets-lora-faster-training-larger", "published": "2024-03-08T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 103506, "rank": 5, "method": "ARTrack-L", "mlmodel": {}, "method_short": "ARTrack-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-01-01", "metrics": {"Average Overlap": "78.5", "Success Rate 0.5": "87.4", "Success Rate 0.75": "77.8"}, "raw_metrics": {"Average Overlap": 78.5, "Success Rate 0.5": 87.4, "Success Rate 0.75": 77.8}, "uses_additional_data": false, "paper": {"id": 1209977, "title": "Autoregressive Visual Tracking", "url": "/paper/autoregressive-visual-tracking", "published": "2023-01-01T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 117270, "rank": 6, "method": "ODTrack-L", "mlmodel": {}, "method_short": "ODTrack-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-01-03", "metrics": {"Average Overlap": 
"78.2", "Success Rate 0.5": null, "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 78.2, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 1353180, "title": "ODTrack: Online Dense Temporal Token Learning for Visual Tracking", "url": "/paper/odtrack-online-dense-temporal-token-learning", "published": "2024-01-03T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 123681, "rank": 7, "method": "RTracker-L", "mlmodel": {}, "method_short": "RTracker-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-03-28", "metrics": {"Average Overlap": "77.9", "Success Rate 0.5": "87", "Success Rate 0.75": "76.9"}, "raw_metrics": {"Average Overlap": 77.9, "Success Rate 0.5": 87.0, "Success Rate 0.75": 76.9}, "uses_additional_data": false, "paper": {"id": 1410187, "title": "RTracker: Recoverable Tracking via PN Tree Structured Memory", "url": "/paper/rtracker-recoverable-tracking-via-pn-tree", "published": "2024-03-28T00:00:00.000000", "code": true, "review_url": "/paper/rtracker-recoverable-tracking-via-pn-tree/review/?hl=123681"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 126454, "rank": 8, "method": "LoRAT-L-378", "mlmodel": {}, "method_short": "LoRAT-L-378", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-03-08", "metrics": {"Average Overlap": "77.5", "Success Rate 0.5": "86.2", "Success Rate 0.75": "78.1"}, "raw_metrics": {"Average Overlap": 77.5, "Success Rate 0.5": 86.2, "Success Rate 0.75": 78.1}, "uses_additional_data": false, "paper": {"id": 1396051, "title": "Tracking Meets LoRA: Faster Training, Larger Model, Stronger Performance", "url": "/paper/tracking-meets-lora-faster-training-larger", "published": "2024-03-08T00:00:00.000000", "code": true, "review_url": null}, 
"external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 121726, "rank": 9, "method": "HIPTrack", "mlmodel": {}, "method_short": "HIPTrack", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-11-03", "metrics": {"Average Overlap": "77.4", "Success Rate 0.5": "88.0", "Success Rate 0.75": "74.5"}, "raw_metrics": {"Average Overlap": 77.4, "Success Rate 0.5": 88.0, "Success Rate 0.75": 74.5}, "uses_additional_data": false, "paper": {"id": 1314446, "title": "HIPTrack: Visual Tracking with Historical Prompts", "url": "/paper/learning-historical-status-prompt-for", "published": "2023-11-03T00:00:00.000000", "code": true, "review_url": "/paper/learning-historical-status-prompt-for/review/?hl=121726"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 117269, "rank": 10, "method": "ODTrack-B", "mlmodel": {}, "method_short": "ODTrack-B", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-01-03", "metrics": {"Average Overlap": "77.0", "Success Rate 0.5": null, "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 77.0, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 1353180, "title": "ODTrack: Online Dense Temporal Token Learning for Visual Tracking", "url": "/paper/odtrack-online-dense-temporal-token-learning", "published": "2024-01-03T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 98994, "rank": 11, "method": "TATrack-L-GOT", "mlmodel": {}, "method_short": "TATrack-L-GOT", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-02-27", "metrics": {"Average Overlap": "76.6", "Success Rate 0.5": "85.7", "Success Rate 0.75": "73.4"}, "raw_metrics": {"Average Overlap": 76.6, "Success Rate 0.5": 85.7, "Success Rate 0.75": 73.4}, 
"uses_additional_data": false, "paper": {"id": 1163625, "title": "Target-Aware Tracking with Long-term Context Attention", "url": "/paper/target-aware-tracking-with-long-term-context", "published": "2023-02-27T00:00:00.000000", "code": true, "review_url": "/paper/target-aware-tracking-with-long-term-context/review/?hl=98994"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 100691, "rank": 12, "method": "DropMAE", "mlmodel": {}, "method_short": "DropMAE", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-04-02", "metrics": {"Average Overlap": "75.9", "Success Rate 0.5": "86.8", "Success Rate 0.75": "72"}, "raw_metrics": {"Average Overlap": 75.9, "Success Rate 0.5": 86.8, "Success Rate 0.75": 72.0}, "uses_additional_data": false, "paper": {"id": 1184444, "title": "DropMAE: Masked Autoencoders with Spatial-Attention Dropout for Tracking Tasks", "url": "/paper/dropmae-masked-autoencoders-with-spatial", "published": "2023-04-02T00:00:00.000000", "code": true, "review_url": "/paper/dropmae-masked-autoencoders-with-spatial/review/?hl=100691"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 78904, "rank": 13, "method": "NeighborTrack-OSTrack", "mlmodel": {}, "method_short": "NeighborTrack-OSTrack", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-11-12", "metrics": {"Average Overlap": "75.7", "Success Rate 0.5": "85.72", "Success Rate 0.75": "73.3"}, "raw_metrics": {"Average Overlap": 75.7, "Success Rate 0.5": 85.72, "Success Rate 0.75": 73.3}, "uses_additional_data": false, "paper": {"id": 1110501, "title": "NeighborTrack: Improving Single Object Tracking by Bipartite Matching with Neighbor Tracklets", "url": "/paper/neighbortrack-improving-single-object", "published": "2022-11-12T00:00:00.000000", "code": true, "review_url": "/paper/neighbortrack-improving-single-object/review/?hl=78904"}, 
"external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 96516, "rank": 14, "method": "MixViT-L(ConvMAE)", "mlmodel": {}, "method_short": "MixViT-L", "method_details": "ConvMAE", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-02-06", "metrics": {"Average Overlap": "75.7", "Success Rate 0.5": "85.3", "Success Rate 0.75": "75.1"}, "raw_metrics": {"Average Overlap": 75.7, "Success Rate 0.5": 85.3, "Success Rate 0.75": 75.1}, "uses_additional_data": false, "paper": {"id": 1153024, "title": "MixFormer: End-to-End Tracking with Iterative Mixed Attention", "url": "/paper/mixformer-end-to-end-tracking-with-iterative-2", "published": "2023-02-06T00:00:00.000000", "code": true, "review_url": "/paper/mixformer-end-to-end-tracking-with-iterative-2/review/?hl=96516"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 78769, "rank": 15, "method": "MixFormer-L", "mlmodel": {}, "method_short": "MixFormer-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-03-21", "metrics": {"Average Overlap": "75.6", "Success Rate 0.5": "85.73", "Success Rate 0.75": "72.8"}, "raw_metrics": {"Average Overlap": 75.6, "Success Rate 0.5": 85.73, "Success Rate 0.75": 72.8}, "uses_additional_data": false, "paper": {"id": 979868, "title": "MixFormer: End-to-End Tracking with Iterative Mixed Attention", "url": "/paper/mixformer-end-to-end-tracking-with-iterative-1", "published": "2022-03-21T00:00:00.000000", "code": true, "review_url": "/paper/mixformer-end-to-end-tracking-with-iterative-1/review/?hl=78769"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 102161, "rank": 16, "method": "SeqTrack-L384", "mlmodel": {}, "method_short": "SeqTrack-L384", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2023-04-27", "metrics": {"Average Overlap": "74.8", "Success Rate 0.5": "81.9", "Success Rate 
0.75": "72.2"}, "raw_metrics": {"Average Overlap": 74.8, "Success Rate 0.5": 81.9, "Success Rate 0.75": 72.2}, "uses_additional_data": false, "paper": {"id": 1198710, "title": "Unified Sequence-to-Sequence Learning for Single- and Multi-Modal Visual Object Tracking", "url": "/paper/seqtrack-sequence-to-sequence-learning-for", "published": "2023-04-27T00:00:00.000000", "code": true, "review_url": "/paper/seqtrack-sequence-to-sequence-learning-for/review/?hl=102161"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 53951, "rank": 17, "method": "OSTrack-384", "mlmodel": {}, "method_short": "OSTrack-384", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-03-22", "metrics": {"Average Overlap": "73.7", "Success Rate 0.5": "83.2", "Success Rate 0.75": "70.8"}, "raw_metrics": {"Average Overlap": 73.7, "Success Rate 0.5": 83.2, "Success Rate 0.75": 70.8}, "uses_additional_data": false, "paper": {"id": 982210, "title": "Joint Feature Learning and Relation Modeling for Tracking: A One-Stream Framework", "url": "/paper/joint-feature-learning-and-relation-modeling", "published": "2022-03-22T00:00:00.000000", "code": true, "review_url": "/paper/joint-feature-learning-and-relation-modeling/review/?hl=53951"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 71115, "rank": 18, "method": "SwinV2-L 1K-MIM", "mlmodel": {}, "method_short": "SwinV2-L 1K-MIM", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-05-26", "metrics": {"Average Overlap": "72.9", "Success Rate 0.5": null, "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 72.9, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 1016616, "title": "Revealing the Dark Secrets of Masked Image Modeling", "url": "/paper/revealing-the-dark-secrets-of-masked-image", "published": "2022-05-26T00:00:00.000000", 
"code": true, "review_url": "/paper/revealing-the-dark-secrets-of-masked-image/review/?hl=71115"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 49614, "rank": 19, "method": "MixFormer-1k", "mlmodel": {}, "method_short": "MixFormer-1k", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-03-21", "metrics": {"Average Overlap": "71.2", "Success Rate 0.5": "79.9", "Success Rate 0.75": "65.8"}, "raw_metrics": {"Average Overlap": 71.2, "Success Rate 0.5": 79.9, "Success Rate 0.75": 65.8}, "uses_additional_data": false, "paper": {"id": 979868, "title": "MixFormer: End-to-End Tracking with Iterative Mixed Attention", "url": "/paper/mixformer-end-to-end-tracking-with-iterative-1", "published": "2022-03-21T00:00:00.000000", "code": true, "review_url": "/paper/mixformer-end-to-end-tracking-with-iterative-1/review/?hl=49614"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 71113, "rank": 20, "method": "SwinV2-B 1K-MIM", "mlmodel": {}, "method_short": "SwinV2-B 1K-MIM", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-05-26", "metrics": {"Average Overlap": "70.8", "Success Rate 0.5": null, "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 70.8, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 1016616, "title": "Revealing the Dark Secrets of Masked Image Modeling", "url": "/paper/revealing-the-dark-secrets-of-masked-image", "published": "2022-05-26T00:00:00.000000", "code": true, "review_url": "/paper/revealing-the-dark-secrets-of-masked-image/review/?hl=71113"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 49613, "rank": 21, "method": "MixFormer", "mlmodel": {}, "method_short": "MixFormer", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-03-21", "metrics": 
{"Average Overlap": "70.7", "Success Rate 0.5": "80.0", "Success Rate 0.75": "67.8"}, "raw_metrics": {"Average Overlap": 70.7, "Success Rate 0.5": 80.0, "Success Rate 0.75": 67.8}, "uses_additional_data": false, "paper": {"id": 979868, "title": "MixFormer: End-to-End Tracking with Iterative Mixed Attention", "url": "/paper/mixformer-end-to-end-tracking-with-iterative-1", "published": "2022-03-21T00:00:00.000000", "code": true, "review_url": "/paper/mixformer-end-to-end-tracking-with-iterative-1/review/?hl=49613"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 60112, "rank": 22, "method": "AiATrack", "mlmodel": {}, "method_short": "AiATrack", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-07-20", "metrics": {"Average Overlap": "69.6", "Success Rate 0.5": "80.0", "Success Rate 0.75": "63.2"}, "raw_metrics": {"Average Overlap": 69.6, "Success Rate 0.5": 80.0, "Success Rate 0.75": 63.2}, "uses_additional_data": false, "paper": {"id": 1046871, "title": "AiATrack: Attention in Attention for Transformer Visual Tracking", "url": "/paper/aiatrack-attention-in-attention-for", "published": "2022-07-20T00:00:00.000000", "code": true, "review_url": "/paper/aiatrack-attention-in-attention-for/review/?hl=60112"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 47328, "rank": 23, "method": "SwinTrack-B", "mlmodel": {}, "method_short": "SwinTrack-B", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-12-02", "metrics": {"Average Overlap": "69.4", "Success Rate 0.5": "78", "Success Rate 0.75": "64.3"}, "raw_metrics": {"Average Overlap": 69.4, "Success Rate 0.5": 78.0, "Success Rate 0.75": 64.3}, "uses_additional_data": false, "paper": {"id": 924745, "title": "SwinTrack: A Simple and Strong Baseline for Transformer Tracking", "url": "/paper/swintrack-a-simple-and-strong-baseline-for", "published": 
"2021-12-02T00:00:00.000000", "code": true, "review_url": "/paper/swintrack-a-simple-and-strong-baseline-for/review/?hl=47328"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 29093, "rank": 24, "method": "STARK", "mlmodel": {}, "method_short": "STARK", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-03-31", "metrics": {"Average Overlap": "68.8", "Success Rate 0.5": "78.1", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 68.8, "Success Rate 0.5": 78.1, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 772584, "title": "Learning Spatio-Temporal Transformer for Visual Tracking", "url": "/paper/learning-spatio-temporal-transformer-for", "published": "2021-03-31T00:00:00.000000", "code": true, "review_url": "/paper/learning-spatio-temporal-transformer-for/review/?hl=29093"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 63872, "rank": 25, "method": "SLT-TransT", "mlmodel": {}, "method_short": "SLT-TransT", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-08-11", "metrics": {"Average Overlap": "67.5", "Success Rate 0.5": "76.8", "Success Rate 0.75": "60.3"}, "raw_metrics": {"Average Overlap": 67.5, "Success Rate 0.5": 76.8, "Success Rate 0.75": 60.3}, "uses_additional_data": false, "paper": {"id": 1057994, "title": "Towards Sequence-Level Training for Visual Tracking", "url": "/paper/towards-sequence-level-training-for-visual", "published": "2022-08-11T00:00:00.000000", "code": true, "review_url": "/paper/towards-sequence-level-training-for-visual/review/?hl=63872"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 39966, "rank": 26, "method": "TREG", "mlmodel": {}, "method_short": "TREG", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-04-01", "metrics": {"Average Overlap": 
"66.8", "Success Rate 0.5": "77.8", "Success Rate 0.75": "57.2"}, "raw_metrics": {"Average Overlap": 66.8, "Success Rate 0.5": 77.8, "Success Rate 0.75": 57.2}, "uses_additional_data": false, "paper": {"id": 773477, "title": "Target Transformed Regression for Accurate Tracking", "url": "/paper/target-transformed-regression-for-accurate", "published": "2021-04-01T00:00:00.000000", "code": true, "review_url": "/paper/target-transformed-regression-for-accurate/review/?hl=39966"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 29178, "rank": 27, "method": "Siam R-CNN", "mlmodel": {}, "method_short": "Siam R-CNN", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2019-11-28", "metrics": {"Average Overlap": "64.9", "Success Rate 0.5": "72.8", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 64.9, "Success Rate 0.5": 72.8, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 174798, "title": "Siam R-CNN: Visual Tracking by Re-Detection", "url": "/paper/siam-r-cnn-visual-tracking-by-re-detection", "published": "2019-11-28T00:00:00.000000", "code": true, "review_url": "/paper/siam-r-cnn-visual-tracking-by-re-detection/review/?hl=29178"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 73769, "rank": 28, "method": "FEAR-L", "mlmodel": {}, "method_short": "FEAR-L", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-12-15", "metrics": {"Average Overlap": "64.5", "Success Rate 0.5": null, "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 64.5, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 932013, "title": "FEAR: Fast, Efficient, Accurate and Robust Visual Tracker", "url": "/paper/fear-fast-efficient-accurate-and-robust", "published": "2021-12-15T00:00:00.000000", "code": true, "review_url": 
"/paper/fear-fast-efficient-accurate-and-robust/review/?hl=73769"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 45140, "rank": 29, "method": "STMTrack", "mlmodel": {}, "method_short": "STMTrack", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-04-01", "metrics": {"Average Overlap": "64.2", "Success Rate 0.5": "73.7", "Success Rate 0.75": "57.5"}, "raw_metrics": {"Average Overlap": 64.2, "Success Rate 0.5": 73.7, "Success Rate 0.75": 57.5}, "uses_additional_data": false, "paper": {"id": 773488, "title": "STMTrack: Template-free Visual Tracking with Space-time Memory Networks", "url": "/paper/stmtrack-template-free-visual-tracking-with", "published": "2021-04-01T00:00:00.000000", "code": true, "review_url": "/paper/stmtrack-template-free-visual-tracking-with/review/?hl=45140"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 73768, "rank": 30, "method": "FEAR-M", "mlmodel": {}, "method_short": "FEAR-M", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-12-15", "metrics": {"Average Overlap": "62.3", "Success Rate 0.5": null, "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 62.3, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 932013, "title": "FEAR: Fast, Efficient, Accurate and Robust Visual Tracker", "url": "/paper/fear-fast-efficient-accurate-and-robust", "published": "2021-12-15T00:00:00.000000", "code": true, "review_url": "/paper/fear-fast-efficient-accurate-and-robust/review/?hl=73768"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 73767, "rank": 31, "method": "FEAR-XS", "mlmodel": {}, "method_short": "FEAR-XS", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-12-15", "metrics": {"Average Overlap": "61.9", "Success Rate 0.5": null, 
"Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 61.9, "Success Rate 0.5": null, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 932013, "title": "FEAR: Fast, Efficient, Accurate and Robust Visual Tracker", "url": "/paper/fear-fast-efficient-accurate-and-robust", "published": "2021-12-15T00:00:00.000000", "code": true, "review_url": "/paper/fear-fast-efficient-accurate-and-robust/review/?hl=73767"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 29511, "rank": 32, "method": "TRASFUST", "mlmodel": {}, "method_short": "TRASFUST", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2020-07-08", "metrics": {"Average Overlap": "61.7", "Success Rate 0.5": "72.9", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 61.7, "Success Rate 0.5": 72.9, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 207679, "title": "Tracking-by-Trackers with a Distilled and Reinforced Model", "url": "/paper/a-distilled-model-for-tracking-and-tracker", "published": "2020-07-08T00:00:00.000000", "code": true, "review_url": "/paper/a-distilled-model-for-tracking-and-tracker/review/?hl=29511"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 17856, "rank": 33, "method": "Ocean", "mlmodel": {}, "method_short": "Ocean", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2020-06-18", "metrics": {"Average Overlap": "61.1", "Success Rate 0.5": "72.1", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 61.1, "Success Rate 0.5": 72.1, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 203186, "title": "Ocean: Object-aware Anchor-free Tracking", "url": "/paper/ocean-object-aware-anchor-free-tracking", "published": "2020-06-18T00:00:00.000000", "code": true, "review_url": "/paper/ocean-object-aware-anchor-free-tracking/review/?hl=17856"}, 
"external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 29180, "rank": 34, "method": "DiMP", "mlmodel": {}, "method_short": "DiMP", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2019-04-15", "metrics": {"Average Overlap": "61.1", "Success Rate 0.5": "71.7", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 61.1, "Success Rate 0.5": 71.7, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 111589, "title": "Learning Discriminative Model Prediction for Tracking", "url": "/paper/190407220", "published": "2019-04-15T00:00:00.000000", "code": true, "review_url": "/paper/190407220/review/?hl=29180"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 29179, "rank": 35, "method": "ATOM", "mlmodel": {}, "method_short": "ATOM", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2018-11-19", "metrics": {"Average Overlap": "61.0", "Success Rate 0.5": "74.2", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 61.0, "Success Rate 0.5": 74.2, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 62640, "title": "ATOM: Accurate Tracking by Overlap Maximization", "url": "/paper/atom-accurate-tracking-by-overlap", "published": "2018-11-19T00:00:00.000000", "code": true, "review_url": "/paper/atom-accurate-tracking-by-overlap/review/?hl=29179"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 3401, "row_id": 44691, "rank": 36, "method": "SiamFC++", "mlmodel": {}, "method_short": "SiamFC++", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2019-11-14", "metrics": {"Average Overlap": "61.0", "Success Rate 0.5": "74.2", "Success Rate 0.75": null}, "raw_metrics": {"Average Overlap": 61.0, "Success Rate 0.5": 74.2, "Success Rate 0.75": null}, "uses_additional_data": false, "paper": {"id": 172496, "title": 
"SiamFC++: Towards Robust and Accurate Visual Tracking with Target Estimation Guidelines", "url": "/paper/siamfc-towards-robust-and-accurate-visual", "published": "2019-11-14T00:00:00.000000", "code": true, "review_url": "/paper/siamfc-towards-robust-and-accurate-visual/review/?hl=44691"}, "external_source_url": null, "tags": [], "reports": []}]</script> <script id="community-chart-data" type="application/json">{"all": {"yAxis": {"title": "Average Overlap", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}, "uses_additional_data": {"yAxis": {"title": "Average Overlap", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}, "no_additional_data": {"yAxis": {"title": "Average Overlap", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}}</script> <script id="community-table-metrics" type="application/json">[]</script> <script id="community-table-data" type="application/json">[]</script> <script 
id="dataset-details" type="application/json">[{"name": "GOT-10k", "fullName": "Generic Object Tracking Benchmark", "url": "/dataset/got-10k", "description": "The GOT-10k dataset contains more than 10,000 video segments of real-world moving objects and over 1.5 million manually labelled bounding boxes. The dataset contains more than 560 classes of real-world moving objects and 80+ classes of motion patterns.", "imagePath": "https://production-media.paperswithcode.com/datasets/got10k.jpg", "iconName": "film", "color": "#F37668"}]</script> <script id="sota-page-details" type="application/json">{"task_main_area_name": "Computer Vision", "task_name": "Visual Object Tracking", "dataset_name": "GOT-10k", "description": "", "mirror_url": null, "has_competition_entries": false}</script> <script type="application/javascript"> let evaluationChartData = JSON.parse( document.getElementById("evaluation-chart-data").textContent ); let evaluationTableMetrics = JSON.parse( document.getElementById("evaluation-table-metrics").textContent ); let evaluationTableData = JSON.parse( document.getElementById("evaluation-table-data").textContent ); let communityChartData = JSON.parse( document.getElementById("community-chart-data").textContent ); let communityTableMetrics = JSON.parse( document.getElementById("community-table-metrics").textContent ); let communityTableData = JSON.parse( document.getElementById("community-table-data").textContent ); let datasetDetails = JSON.parse( document.getElementById("dataset-details").textContent ); let sotaPageDetails = JSON.parse( document.getElementById("sota-page-details").textContent ); // Containers let sotaPageContainer = document.getElementById("sota-page"); // Breadcrumbs let breadcrumbs = [ { title: "Browse", url: "/sota" }, { title: sotaPageDetails.task_main_area_name, url: "/area/computer-vision" }, { title: sotaPageDetails.task_name, url: "/task/visual-object-tracking" }, { title: sotaPageDetails.dataset_name + " dataset", url: 
"/dataset/got-10k" } ]; let highlight = ( null ); function datasetsSearchUrl(query) { return "/datasets?q="+encodeURIComponent(query); } function newDatasetUrl(datasetName) { return "/contribute/dataset/new?name="+encodeURIComponent(datasetName); } const SOTA_AUTOCOMPLETE_PAPER_URL = "/sota/autocomplete/paper"; const VIEW_PAPER_URL = "/paper/PAPER_SLUG"; </script> <!-- End SOTA Table Generation --> </div> <div class="footer"> <div class="footer-contact"> <span class="footer-contact-item">Contact us on:</span> <a class="footer-contact-item" href="mailto:hello@paperswithcode.com"> <span class=" icon-wrapper icon-ion" data-name="mail"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M424 80H88a56.06 56.06 0 0 0-56 56v240a56.06 56.06 0 0 0 56 56h336a56.06 56.06 0 0 0 56-56V136a56.06 56.06 0 0 0-56-56zm-14.18 92.63l-144 112a16 16 0 0 1-19.64 0l-144-112a16 16 0 1 1 19.64-25.26L256 251.73l134.18-104.36a16 16 0 0 1 19.64 25.26z"/></svg></span> hello@paperswithcode.com </a>. <span class="footer-contact-item"> Papers With Code is a free resource with all data licensed under <a rel="noreferrer" href="https://creativecommons.org/licenses/by-sa/4.0/">CC-BY-SA</a>. 
</span> </div> <div class="footer-links"> <a href="/site/terms">Terms</a> <a href="/site/data-policy">Data policy</a> <a href="/site/cookies-policy">Cookies policy</a> <a href="/about#team" class="fair-logo"> from <img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAANAAAAAgCAMAAABU6AZfAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAJcEhZcwAAFiUAABYlAUlSJPAAAABFUExURUdwTBwqMhwqMxsqMhkqMxsqMhwqMgCA+hwrMxJIgBsrMxsqMgJ28AF58wF38BsqMwB58hsqMwF17wF07hwrMwRm4QJz7Wj6SIIAAAAUdFJOUwDP87wcPIT+4A1tVti1Ta0smZVzG3JP8wAABR9JREFUWMO1memWpCoMgF0QxX1//0e9kCAkAadq5tzKjzndQmM+szNFEWQ9puu6xn02BXm4j23bTsdapKJAMguFgRVT/Ejyx4uH5hgvL1PUfm69jEd6bN05GTJvXF5X/hfRcPyWe2kTLDFdRA4ENVMbZZJGMt3ppEttNMDC2X/Qa7MK1OrveZoKz2/445I+U4znuvaExxKZLFCqtym/A6rzn+OjbHj8ubwDmfESslvtgWea13WeckQPUKJTf/4USHkDnVXzCrT74DnmeX+8rjgcxA4QBmPpyAKdOm+5XwFpgHH/bG9AMzLMqM9DxxCQaM0qLr7U4xE/AgIDVRBHlcoDeYd7lFee6GZOBvaaskD8S6nut0Dg0ItZEt+IQAfjseIzRDvS/WCxWQJ17phqEGqepQBS/VaXZa0H/4XUYMVt6nr309DEjYvduPT2gWELQTr0iQbC1+SADOg/kjVvspGqX6zSRAgEKbqOf6zgd82AVB+8s0YNm5NL6Y8MGzttwKt0krP9+9A/+hzQTALoUX5MnxW7iCIEUmD7IVZb8G0G1HRE9UqbWKkEUFPSR0MWqH5eB65XmgzQdN3WGjxReROxPD2LROeBIEiD7UGLraBAjMcS9W9AquTPckBgoMqEWG1SIGN57otn5KO9Y30N4rq6MQFC5TX1cEWBfJLY+mbQ5ZMUm8UK7F1A9GNc90T3enkpCZhCdUzfdQq0Wp774gnZao55YU3SgkmAVBez1eDfR4BABd/XqY36ichyaLUnyJZ8jatimUBjqQTouK2M3OGs4miiiduN5bkHCL15C9Zw7heBRMHYSMRxIGyYFsPqpwTqactT8w0P0OSA9iRY9jQvrDyIAhCoAjrrR90I1PNCpcivHEh+cATUmS5xoCaNB3ggMzqgRO/RYPIb1WviDkB4sv22kB8ghQcgUIFWzyUmaQ6kpf5DCoTFh5fwQQCt493e9ypD5Xjq7S5cMQeEubpBf2oKCoSMohPzduBAi2yimhRIc3NvrOd+gCxPexvhcGPM3SRoJpbmIhAGSudTNgNCR+qIRL05UCebsxTIiAYOX6sEkONphRkw9A9ZjADIZIDg857we5MBSiQHVMlWJgXyeTBIyVpGD4RttHC4yVtENHn7K5ASdeM3QGX2sKcKBCBmITYmrGii9TOQT7JYwxOgrhbyby4XJrvs54kuR8vlCg4XEgEOEs8Q8R5DYZboCwEESpTmi/Hhc1Lo8zxPlghZjpbLqWVGUGxSes1y4W2lkkC+Wf0C6GPaxtZo0VQW4nOhsJLqAg01HXqgGN0+083MegKoYLdisbDqzHVG1iZJYe0EUDoB+dj149gDRCCgt2lZ1zA5nhvCyEwvrc/b3N/HiZlMgINmZaR/aX3MJluf7Kepo8+F5tRfUh1wR0odzg8Srnm9w7L5SyB/p6H9Ptt0Vj310ngAlDHbnLo3mGc00sJiQ+4KEM+I8xC7fWv5VGcz3Y0C2ZCa70sgf0
tXbnbY1jXpln3W6jYXDG4jNthdrfVWn8n4gAVAZe+0GgaEaeGFx4XRQyTM9yWQnNuIAy5/HPAWPuDJ8Yc66sYvSeY/8dhlYqH0kuQzkFQ03nnHCyI/gtc0GfM7BVPmL5J0yHPkXm6d3u6v/TLw3GL5ayDr6WW47awHYmS1VC+XJOVQcCCZBPk13SCvgmcb8uI/UqjqdvlOlk3j5OU20C0putdO1ZWNo0a8oumXslx0vMYaNrfPURt2hnp5G2rhtsEP5j/3Wqt0fQd1YgAAAABJRU5ErkJggg=="> </a> </div> </div> <script> // MathJax window.MathJax = { tex: { inlineMath: [ ["$", "$"], ["\\(", "\\)"], ], }, }; const mathjaxScript = document.createElement("script"); mathjaxScript.src = "https://production-assets.paperswithcode.com/static/js/mathjax/tex-chtml.js"; document.head.appendChild(mathjaxScript); </script> <script src="https://production-assets.paperswithcode.com/perf/766.4af6b88b.js" defer></script><script src="https://production-assets.paperswithcode.com/perf/2.6da00df7.js" defer></script><script src="https://production-assets.paperswithcode.com/perf/351.a22a9607.js" defer></script><script src="https://production-assets.paperswithcode.com/perf/452.d3ecdfa4.js" defer></script><script src="https://production-assets.paperswithcode.com/perf/553.357efc0e.js" defer></script><script src="https://production-assets.paperswithcode.com/perf/sota.table.040f2c99.js" defer></script> </body> </html>