# KoNViD-1k Benchmark (Video Quality Assessment)
Task: Video Quality Assessment
Dataset: KoNViD-1k
Metric: PLCC (Pearson linear correlation coefficient)
URL: https://paperswithcode.com/sota/video-quality-assessment-on-konvid-1k

The current state-of-the-art on KoNViD-1k is DOVER (end-to-end). See a full comparison of 21 papers with code.
aria-hidden="true">×</span> </button> </div> <form action="" method="post"> <div class="modal-body"> <div class="modal-body-info-text"> Stay informed on the latest trending ML papers with code, research developments, libraries, methods, and datasets.<br/><br/> <a href="/newsletter">Read previous issues</a> </div> <input type="hidden" name="csrfmiddlewaretoken" value="v1ShMS8mOqYXPWW1zfbXaL6zafCDvlaGcM64MRVg7k36NVEhUzhRXiBk0RYkGoLp"> <input placeholder="Enter your email" type="email" class="form-control pwc-email" name="address" id="id_address" max_length="100" required> </div> <div class="modal-footer"> <button type="submit" class="btn btn-primary">Subscribe</button> </div> </form> </div> </div> </div> <!-- Login --> <div class="modal fade" id="loginModal" tabindex="-1" role="dialog" aria-labelledby="loginModalLabel" aria-hidden="true"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <h5 class="modal-title" id="loginModalLabel">Join the community</h5> <button type="button" class="close btn-close" data-dismiss="modal" data-bs-dismiss="modal" aria-label="Close"> <span aria-hidden="true">×</span> </button> </div> <div class="login-modal-message"> You need to <a href="/accounts/login?next=/sota/video-quality-assessment-on-konvid-1k">log in</a> to edit.<br/> You can <a href="/accounts/register?next=/sota/video-quality-assessment-on-konvid-1k">create a new account</a> if you don't have one.<br/><br/> </div> </div> </div> </div> <div class="container content content-buffer "> <div class="leaderboard-header"> <a href="/task/video-quality-assessment"> <span class="badge badge-primary"> <img src="https://production-media.paperswithcode.com/tasks/default.gif"> <span>Video Quality Assessment</span> </span> </a> </div> <div id="sota-page"> <div class="text-center"> <img src="https://production-assets.paperswithcode.com/perf/images/spin-1s-32px-ed14c515.gif"> </div> </div> <link href="https://production-assets.paperswithcode.com/static/fonts/font-awesome/css/all.min.css" rel="stylesheet" /> <script type="application/javascript"> const CSRF_TOKEN = "v1ShMS8mOqYXPWW1zfbXaL6zafCDvlaGcM64MRVg7k36NVEhUzhRXiBk0RYkGoLp"; const USER_IS_AUTHENTICATED = false; const LOGIN_REQUIRED = true; </script> <script type="module" src="https://unpkg.com/ionicons@5.1.2/dist/ionicons/ionicons.esm.js" ></script> <script nomodule="" src="https://unpkg.com/ionicons@5.1.2/dist/ionicons/ionicons.js" ></script> <!-- Start SOTA Table Generation --> <script id="evaluation-chart-data" type="application/json">{"all": {"yAxis": {"title": "PLCC", "includeZero": false, "gridColor": "#ddd", "valueFormatString": "", "minimum": 0.74825, "maximum": 0.91925}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": [{"x": "2019-06-20", "y": 0.7688, "name": "TLVQM", "nameShort": "TLVQM", "nameDetails": null, "paperSlug": "two-level-approach-for-no-reference-consumer", "usesAdditionalData": false}, {"x": "2019-08-01", "y": 0.7754, "name": "VSFA", "nameShort": "VSFA", "nameDetails": null, "paperSlug": "quality-assessment-of-in-the-wild-videos", "usesAdditionalData": false}, {"x": "2020-05-20", "y": 0.7803, "name": "VIDEVAL", "nameShort": "VIDEVAL", "nameDetails": null, "paperSlug": "ugc-vqa-benchmarking-blind-video-quality", "usesAdditionalData": false}, {"x": "2021-01-26", "y": 0.8175, "name": "RAPIQUE", "nameShort": "RAPIQUE", "nameDetails": null, "paperSlug": 
"rapique-rapid-and-accurate-video-quality", "usesAdditionalData": false}, {"x": "2021-08-19", "y": 0.834, "name": "BVQA-2022", "nameShort": "BVQA-2022", "nameDetails": null, "paperSlug": "blindly-assess-quality-of-in-the-wild-videos", "usesAdditionalData": true}, {"x": "2021-10-25", "y": 0.842, "name": "CONTRIQUE", "nameShort": "CONTRIQUE", "nameDetails": null, "paperSlug": "image-quality-assessment-using-contrastive", "usesAdditionalData": true}, {"x": "2022-04-29", "y": 0.86, "name": "SimpleVQA", "nameShort": "SimpleVQA", "nameDetails": null, "paperSlug": "a-deep-learning-based-no-reference-quality", "usesAdditionalData": false}, {"x": "2022-07-06", "y": 0.892, "name": "FAST-VQA ", "nameShort": "FAST-VQA ", "nameDetails": "finetuned on KonViD-1k", "paperSlug": "fast-vqa-efficient-end-to-end-video-quality", "usesAdditionalData": true}, {"x": "2022-10-11", "y": 0.898, "name": "FasterVQA ", "nameShort": "FasterVQA ", "nameDetails": "fine-tuned", "paperSlug": "neighbourhood-representative-sampling-for", "usesAdditionalData": true}, {"x": "2022-11-09", "y": 0.905, "name": "DOVER ", "nameShort": "DOVER ", "nameDetails": "end-to-end", "paperSlug": "disentangling-aesthetic-and-technical-effects", "usesAdditionalData": false}]}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": [{"x": "2020-11-27", "y": 0.77, "name": "PVQ", "nameShort": "PVQ", "nameDetails": null, "paperSlug": "patch-vq-patching-up-the-video-quality", "usesAdditionalData": true}, {"x": "2021-09-17", "y": 0.7625, "name": "ChipQA", "nameShort": "ChipQA", "nameDetails": null, "paperSlug": "chipqa-no-reference-video-quality-prediction", "usesAdditionalData": false}, {"x": "2022-06-20", "y": 0.86, "name": "DisCoVQA", "nameShort": "DisCoVQA", "nameDetails": null, "paperSlug": "discovqa-temporal-distortion-content", "usesAdditionalData": false}, {"x": "2022-06-29", "y": 0.849, "name": "CONVIQT", "nameShort": "CONVIQT", "nameDetails": null, "paperSlug": "conviqt-contrastive-video-quality-estimator", "usesAdditionalData": true}, {"x": "2022-07-06", "y": 0.855, "name": "FAST-VQA ", "nameShort": "FAST-VQA ", "nameDetails": "trained on LSVQ only", "paperSlug": "fast-vqa-efficient-end-to-end-video-quality", "usesAdditionalData": true}, {"x": "2022-08-31", "y": 0.835, "name": "2BiVQA", "nameShort": "2BiVQA", "nameDetails": null, "paperSlug": "2bivqa-double-bi-lstm-based-video-quality", "usesAdditionalData": true}, {"x": "2022-10-09", "y": 0.8562, "name": "HVS-5M", "nameShort": "HVS-5M", "nameDetails": null, "paperSlug": "hvs-revisited-a-comprehensive-video-quality", "usesAdditionalData": false}, {"x": "2022-11-09", "y": 0.894, "name": "DOVER ", "nameShort": "DOVER ", "nameDetails": "head-only", "paperSlug": "disentangling-aesthetic-and-technical-effects", "usesAdditionalData": false}, {"x": "2024-07-16", "y": 0.8668, "name": "ReLaX-VQA ", "nameShort": "ReLaX-VQA ", "nameDetails": "finetuned on KoNViD-1k", "paperSlug": "relax-vqa-residual-fragment-and-layer-stack", "usesAdditionalData": true}, {"x": "2024-07-16", "y": 0.8473, "name": "ReLaX-VQA", "nameShort": "ReLaX-VQA", "nameDetails": null, "paperSlug": "relax-vqa-residual-fragment-and-layer-stack", "usesAdditionalData": false}, {"x": "2024-07-16", "y": 0.8427, "name": "ReLaX-VQA ", "nameShort": "ReLaX-VQA ", "nameDetails": "trained on LSVQ only", "paperSlug": "relax-vqa-residual-fragment-and-layer-stack", "usesAdditionalData": true}]}}}, "uses_additional_data": {"yAxis": {"title": 
"PLCC", "includeZero": false, "gridColor": "#ddd", "valueFormatString": "", "minimum": 0.7572, "maximum": 0.9108}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": [{"x": "2020-11-27", "y": 0.77, "name": "PVQ", "nameShort": "PVQ", "nameDetails": null, "paperSlug": "patch-vq-patching-up-the-video-quality", "usesAdditionalData": true}, {"x": "2021-08-19", "y": 0.834, "name": "BVQA-2022", "nameShort": "BVQA-2022", "nameDetails": null, "paperSlug": "blindly-assess-quality-of-in-the-wild-videos", "usesAdditionalData": true}, {"x": "2021-10-25", "y": 0.842, "name": "CONTRIQUE", "nameShort": "CONTRIQUE", "nameDetails": null, "paperSlug": "image-quality-assessment-using-contrastive", "usesAdditionalData": true}, {"x": "2022-06-29", "y": 0.849, "name": "CONVIQT", "nameShort": "CONVIQT", "nameDetails": null, "paperSlug": "conviqt-contrastive-video-quality-estimator", "usesAdditionalData": true}, {"x": "2022-07-06", "y": 0.892, "name": "FAST-VQA ", "nameShort": "FAST-VQA ", "nameDetails": "finetuned on KonViD-1k", "paperSlug": "fast-vqa-efficient-end-to-end-video-quality", "usesAdditionalData": true}, {"x": "2022-10-11", "y": 0.898, "name": "FasterVQA ", "nameShort": "FasterVQA ", "nameDetails": "fine-tuned", "paperSlug": "neighbourhood-representative-sampling-for", "usesAdditionalData": true}]}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": [{"x": "2022-07-06", "y": 0.855, "name": "FAST-VQA ", "nameShort": "FAST-VQA ", "nameDetails": "trained on LSVQ only", "paperSlug": "fast-vqa-efficient-end-to-end-video-quality", "usesAdditionalData": true}, {"x": "2022-08-31", "y": 0.835, "name": "2BiVQA", "nameShort": "2BiVQA", "nameDetails": null, "paperSlug": "2bivqa-double-bi-lstm-based-video-quality", "usesAdditionalData": true}, {"x": "2024-07-16", "y": 0.8668, "name": "ReLaX-VQA ", "nameShort": "ReLaX-VQA ", "nameDetails": "finetuned on KoNViD-1k", "paperSlug": "relax-vqa-residual-fragment-and-layer-stack", "usesAdditionalData": true}, {"x": "2024-07-16", "y": 0.8427, "name": "ReLaX-VQA ", "nameShort": "ReLaX-VQA ", "nameDetails": "trained on LSVQ only", "paperSlug": "relax-vqa-residual-fragment-and-layer-stack", "usesAdditionalData": true}]}}}, "no_additional_data": {"yAxis": {"title": "PLCC", "includeZero": false, "gridColor": "#ddd", "valueFormatString": "", "minimum": 0.74825, "maximum": 0.91925}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": [{"x": "2019-06-20", "y": 0.7688, "name": "TLVQM", "nameShort": "TLVQM", "nameDetails": null, "paperSlug": "two-level-approach-for-no-reference-consumer", "usesAdditionalData": false}, {"x": "2019-08-01", "y": 0.7754, "name": "VSFA", "nameShort": "VSFA", "nameDetails": null, "paperSlug": "quality-assessment-of-in-the-wild-videos", "usesAdditionalData": false}, {"x": "2020-05-20", "y": 0.7803, "name": "VIDEVAL", "nameShort": "VIDEVAL", "nameDetails": null, "paperSlug": "ugc-vqa-benchmarking-blind-video-quality", "usesAdditionalData": false}, {"x": "2021-01-26", "y": 0.8175, "name": "RAPIQUE", "nameShort": "RAPIQUE", "nameDetails": null, "paperSlug": "rapique-rapid-and-accurate-video-quality", "usesAdditionalData": false}, {"x": "2022-04-29", "y": 0.86, "name": "SimpleVQA", "nameShort": "SimpleVQA", 
"nameDetails": null, "paperSlug": "a-deep-learning-based-no-reference-quality", "usesAdditionalData": false}, {"x": "2022-11-09", "y": 0.905, "name": "DOVER ", "nameShort": "DOVER ", "nameDetails": "end-to-end", "paperSlug": "disentangling-aesthetic-and-technical-effects", "usesAdditionalData": false}]}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": [{"x": "2021-09-17", "y": 0.7625, "name": "ChipQA", "nameShort": "ChipQA", "nameDetails": null, "paperSlug": "chipqa-no-reference-video-quality-prediction", "usesAdditionalData": false}, {"x": "2022-06-20", "y": 0.86, "name": "DisCoVQA", "nameShort": "DisCoVQA", "nameDetails": null, "paperSlug": "discovqa-temporal-distortion-content", "usesAdditionalData": false}, {"x": "2022-10-09", "y": 0.8562, "name": "HVS-5M", "nameShort": "HVS-5M", "nameDetails": null, "paperSlug": "hvs-revisited-a-comprehensive-video-quality", "usesAdditionalData": false}, {"x": "2022-11-09", "y": 0.894, "name": "DOVER ", "nameShort": "DOVER ", "nameDetails": "head-only", "paperSlug": "disentangling-aesthetic-and-technical-effects", "usesAdditionalData": false}, {"x": "2024-07-16", "y": 0.8473, "name": "ReLaX-VQA", "nameShort": "ReLaX-VQA", "nameDetails": null, "paperSlug": "relax-vqa-residual-fragment-and-layer-stack", "usesAdditionalData": false}]}}}}</script> <script id="evaluation-table-metrics" type="application/json">[{"id": 37539, "name": "PLCC", "is_loss": false, "is_fixed": false}]</script> <script id="evaluation-table-data" type="application/json">[{"table_id": 13855, "row_id": 77955, "rank": 1, "method": "DOVER (end-to-end)", "mlmodel": {}, "method_short": "DOVER ", "method_details": "end-to-end", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-11-09", "metrics": {"PLCC": "0.905"}, "raw_metrics": {"PLCC": 0.905}, "uses_additional_data": false, "paper": {"id": 1108569, "title": "Exploring Video Quality Assessment on User Generated Contents from Aesthetic and Technical Perspectives", "url": "/paper/disentangling-aesthetic-and-technical-effects", "published": "2022-11-09T00:00:00.000000", "code": true, "review_url": "/paper/disentangling-aesthetic-and-technical-effects/review/?hl=77955"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 71515, "rank": 2, "method": "FasterVQA (fine-tuned)", "mlmodel": {}, "method_short": "FasterVQA ", "method_details": "fine-tuned", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-10-11", "metrics": {"PLCC": "0.898"}, "raw_metrics": {"PLCC": 0.898}, "uses_additional_data": true, "paper": {"id": 1089459, "title": "Neighbourhood Representative Sampling for Efficient End-to-end Video Quality Assessment", "url": "/paper/neighbourhood-representative-sampling-for", "published": "2022-10-11T00:00:00.000000", "code": true, "review_url": "/paper/neighbourhood-representative-sampling-for/review/?hl=71515"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 77952, "rank": 3, "method": "DOVER (head-only)", "mlmodel": {}, "method_short": "DOVER ", "method_details": "head-only", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-11-09", "metrics": {"PLCC": "0.894"}, "raw_metrics": {"PLCC": 0.894}, "uses_additional_data": false, "paper": {"id": 1108569, "title": "Exploring Video Quality Assessment on User Generated Contents from Aesthetic and Technical Perspectives", "url": 
"/paper/disentangling-aesthetic-and-technical-effects", "published": "2022-11-09T00:00:00.000000", "code": true, "review_url": "/paper/disentangling-aesthetic-and-technical-effects/review/?hl=77952"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 58706, "rank": 4, "method": "FAST-VQA (finetuned on KonViD-1k)", "mlmodel": {}, "method_short": "FAST-VQA ", "method_details": "finetuned on KonViD-1k", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-07-06", "metrics": {"PLCC": "0.892"}, "raw_metrics": {"PLCC": 0.892}, "uses_additional_data": true, "paper": {"id": 1038716, "title": "FAST-VQA: Efficient End-to-end Video Quality Assessment with Fragment Sampling", "url": "/paper/fast-vqa-efficient-end-to-end-video-quality", "published": "2022-07-06T00:00:00.000000", "code": true, "review_url": "/paper/fast-vqa-efficient-end-to-end-video-quality/review/?hl=58706"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 125837, "rank": 5, "method": "ReLaX-VQA (finetuned on KoNViD-1k)", "mlmodel": {}, "method_short": "ReLaX-VQA ", "method_details": "finetuned on KoNViD-1k", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-07-16", "metrics": {"PLCC": "0.8668"}, "raw_metrics": {"PLCC": 0.8668}, "uses_additional_data": true, "paper": {"id": 1484618, "title": "ReLaX-VQA: Residual Fragment and Layer Stack Extraction for Enhancing Video Quality Assessment", "url": "/paper/relax-vqa-residual-fragment-and-layer-stack", "published": "2024-07-16T00:00:00.000000", "code": true, "review_url": "/paper/relax-vqa-residual-fragment-and-layer-stack/review/?hl=125837"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 84794, "rank": 6, "method": "SimpleVQA", "mlmodel": {}, "method_short": "SimpleVQA", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-04-29", "metrics": {"PLCC": "0.860"}, "raw_metrics": {"PLCC": 0.86}, "uses_additional_data": false, "paper": {"id": 1001886, "title": "A Deep Learning based No-reference Quality Assessment Model for UGC Videos", "url": "/paper/a-deep-learning-based-no-reference-quality", "published": "2022-04-29T00:00:00.000000", "code": true, "review_url": "/paper/a-deep-learning-based-no-reference-quality/review/?hl=84794"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 84798, "rank": 7, "method": "DisCoVQA", "mlmodel": {}, "method_short": "DisCoVQA", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-06-20", "metrics": {"PLCC": "0.860"}, "raw_metrics": {"PLCC": 0.86}, "uses_additional_data": false, "paper": {"id": 1029746, "title": "DisCoVQA: Temporal Distortion-Content Transformers for Video Quality Assessment", "url": "/paper/discovqa-temporal-distortion-content", "published": "2022-06-20T00:00:00.000000", "code": true, "review_url": "/paper/discovqa-temporal-distortion-content/review/?hl=84798"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 84790, "rank": 8, "method": "HVS-5M", "mlmodel": {}, "method_short": "HVS-5M", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-10-09", "metrics": {"PLCC": "0.8562"}, "raw_metrics": {"PLCC": 0.8562}, "uses_additional_data": false, "paper": {"id": 1089191, "title": "HVS Revisited: A Comprehensive Video Quality Assessment Framework", "url": 
"/paper/hvs-revisited-a-comprehensive-video-quality", "published": "2022-10-09T00:00:00.000000", "code": false, "review_url": "/paper/hvs-revisited-a-comprehensive-video-quality/review/?hl=84790"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 58705, "rank": 9, "method": "FAST-VQA (trained on LSVQ only)", "mlmodel": {}, "method_short": "FAST-VQA ", "method_details": "trained on LSVQ only", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-07-06", "metrics": {"PLCC": "0.855"}, "raw_metrics": {"PLCC": 0.855}, "uses_additional_data": true, "paper": {"id": 1038716, "title": "FAST-VQA: Efficient End-to-end Video Quality Assessment with Fragment Sampling", "url": "/paper/fast-vqa-efficient-end-to-end-video-quality", "published": "2022-07-06T00:00:00.000000", "code": true, "review_url": "/paper/fast-vqa-efficient-end-to-end-video-quality/review/?hl=58705"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 117695, "rank": 10, "method": "CONVIQT", "mlmodel": {}, "method_short": "CONVIQT", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-06-29", "metrics": {"PLCC": "0.849"}, "raw_metrics": {"PLCC": 0.849}, "uses_additional_data": true, "paper": {"id": 1035093, "title": "CONVIQT: Contrastive Video Quality Estimator", "url": "/paper/conviqt-contrastive-video-quality-estimator", "published": "2022-06-29T00:00:00.000000", "code": true, "review_url": "/paper/conviqt-contrastive-video-quality-estimator/review/?hl=117695"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 125832, "rank": 11, "method": "ReLaX-VQA", "mlmodel": {}, "method_short": "ReLaX-VQA", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-07-16", "metrics": {"PLCC": "0.8473"}, "raw_metrics": {"PLCC": 0.8473}, "uses_additional_data": false, "paper": {"id": 1484618, "title": "ReLaX-VQA: Residual Fragment and Layer Stack Extraction for Enhancing Video Quality Assessment", "url": "/paper/relax-vqa-residual-fragment-and-layer-stack", "published": "2024-07-16T00:00:00.000000", "code": true, "review_url": "/paper/relax-vqa-residual-fragment-and-layer-stack/review/?hl=125832"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 125838, "rank": 12, "method": "ReLaX-VQA (trained on LSVQ only)", "mlmodel": {}, "method_short": "ReLaX-VQA ", "method_details": "trained on LSVQ only", "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2024-07-16", "metrics": {"PLCC": "0.8427"}, "raw_metrics": {"PLCC": 0.8427}, "uses_additional_data": true, "paper": {"id": 1484618, "title": "ReLaX-VQA: Residual Fragment and Layer Stack Extraction for Enhancing Video Quality Assessment", "url": "/paper/relax-vqa-residual-fragment-and-layer-stack", "published": "2024-07-16T00:00:00.000000", "code": true, "review_url": "/paper/relax-vqa-residual-fragment-and-layer-stack/review/?hl=125838"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 119265, "rank": 13, "method": "CONTRIQUE", "mlmodel": {}, "method_short": "CONTRIQUE", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-10-25", "metrics": {"PLCC": "0.842"}, "raw_metrics": {"PLCC": 0.842}, "uses_additional_data": true, "paper": {"id": 895196, "title": "Image Quality Assessment using Contrastive Learning", "url": 
"/paper/image-quality-assessment-using-contrastive", "published": "2021-10-25T00:00:00.000000", "code": true, "review_url": "/paper/image-quality-assessment-using-contrastive/review/?hl=119265"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 65028, "rank": 14, "method": "2BiVQA", "mlmodel": {}, "method_short": "2BiVQA", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2022-08-31", "metrics": {"PLCC": "0.835"}, "raw_metrics": {"PLCC": 0.835}, "uses_additional_data": true, "paper": {"id": 1067667, "title": "2BiVQA: Double Bi-LSTM based Video Quality Assessment of UGC Videos", "url": "/paper/2bivqa-double-bi-lstm-based-video-quality", "published": "2022-08-31T00:00:00.000000", "code": true, "review_url": "/paper/2bivqa-double-bi-lstm-based-video-quality/review/?hl=65028"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 65076, "rank": 15, "method": "BVQA-2022", "mlmodel": {}, "method_short": "BVQA-2022", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-08-19", "metrics": {"PLCC": "0.834"}, "raw_metrics": {"PLCC": 0.834}, "uses_additional_data": true, "paper": {"id": 853915, "title": "Blindly Assess Quality of In-the-Wild Videos via Quality-aware Pre-training and Motion Perception", "url": "/paper/blindly-assess-quality-of-in-the-wild-videos", "published": "2021-08-19T00:00:00.000000", "code": true, "review_url": "/paper/blindly-assess-quality-of-in-the-wild-videos/review/?hl=65076"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 42974, "rank": 16, "method": "RAPIQUE", "mlmodel": {}, "method_short": "RAPIQUE", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-01-26", "metrics": {"PLCC": "0.8175"}, "raw_metrics": {"PLCC": 0.8175}, "uses_additional_data": false, "paper": {"id": 739188, "title": "RAPIQUE: Rapid and Accurate Video Quality Prediction of User Generated Content", "url": "/paper/rapique-rapid-and-accurate-video-quality", "published": "2021-01-26T00:00:00.000000", "code": true, "review_url": "/paper/rapique-rapid-and-accurate-video-quality/review/?hl=42974"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 42970, "rank": 17, "method": "VIDEVAL", "mlmodel": {}, "method_short": "VIDEVAL", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2020-05-20", "metrics": {"PLCC": "0.7803"}, "raw_metrics": {"PLCC": 0.7803}, "uses_additional_data": false, "paper": {"id": 198374, "title": "UGC-VQA: Benchmarking Blind Video Quality Assessment for User Generated Content", "url": "/paper/ugc-vqa-benchmarking-blind-video-quality", "published": "2020-05-29T00:00:00.000000", "code": true, "review_url": "/paper/ugc-vqa-benchmarking-blind-video-quality/review/?hl=42970"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 42982, "rank": 18, "method": "VSFA", "mlmodel": {}, "method_short": "VSFA", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2019-08-01", "metrics": {"PLCC": "0.7754"}, "raw_metrics": {"PLCC": 0.7754}, "uses_additional_data": false, "paper": {"id": 148828, "title": "Quality Assessment of In-the-Wild Videos", "url": "/paper/quality-assessment-of-in-the-wild-videos", "published": "2019-08-01T00:00:00.000000", "code": true, "review_url": 
"/paper/quality-assessment-of-in-the-wild-videos/review/?hl=42982"}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 42977, "rank": 19, "method": "PVQ", "mlmodel": {}, "method_short": "PVQ", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2020-11-27", "metrics": {"PLCC": "0.770 "}, "raw_metrics": {"PLCC": 0.77}, "uses_additional_data": true, "paper": {"id": 237737, "title": "Patch-VQ: 'Patching Up' the Video Quality Problem", "url": "/paper/patch-vq-patching-up-the-video-quality", "published": "2020-11-27T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 42985, "rank": 20, "method": "TLVQM", "mlmodel": {}, "method_short": "TLVQM", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2019-06-20", "metrics": {"PLCC": "0.7688"}, "raw_metrics": {"PLCC": 0.7688}, "uses_additional_data": false, "paper": {"id": 228528, "title": "Two-Level Approach for No-Reference Consumer Video Quality Assessment", "url": "/paper/two-level-approach-for-no-reference-consumer", "published": "2019-06-20T00:00:00.000000", "code": true, "review_url": null}, "external_source_url": null, "tags": [], "reports": []}, {"table_id": 13855, "row_id": 42980, "rank": 21, "method": "ChipQA", "mlmodel": {}, "method_short": "ChipQA", "method_details": null, "mlmodel_short": null, "mlmodeldetails": null, "evaluation_date": "2021-09-17", "metrics": {"PLCC": "0.7625"}, "raw_metrics": {"PLCC": 0.7625}, "uses_additional_data": false, "paper": {"id": 870405, "title": "ChipQA: No-Reference Video Quality Prediction via Space-Time Chips", "url": "/paper/chipqa-no-reference-video-quality-prediction", "published": "2021-09-17T00:00:00.000000", "code": true, "review_url": "/paper/chipqa-no-reference-video-quality-prediction/review/?hl=42980"}, "external_source_url": null, "tags": [], "reports": []}]</script> <script id="community-chart-data" type="application/json">{"all": {"yAxis": {"title": "PLCC", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}, "uses_additional_data": {"yAxis": {"title": "PLCC", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}, "no_additional_data": {"yAxis": {"title": "PLCC", "includeZero": false, "gridColor": "#ddd", "valueFormatString": ""}, "data": {"trend": {"name": "State-of-the-art methods", "type": "line", "showInLegend": true, "markerSize": 10, "toolTipContent": "{name}: {y}", "color": "#21ccc7", "dataPoints": []}, "other": {"name": "Other methods", "type": "scatter", "showInLegend": true, "color": "#ddd", "markerSize": 10, "toolTipContent": "{name}: {y}", "dataPoints": []}}}}</script> <script id="community-table-metrics" type="application/json">[]</script> <script 
id="community-table-data" type="application/json">[]</script> <script id="dataset-details" type="application/json">[{"name": "KoNViD-1k", "fullName": "KoNViD-1k VQA Database", "url": "/dataset/konvid-1k", "description": "Subjective video quality assessment (VQA) strongly depends on semantics, context, and the types of visual distortions. A lot of existing VQA databases cover small numbers of video sequences with artificial distortions. When testing newly developed Quality of Experience (QoE) models and metrics, they are commonly evaluated against subjective data from such databases, that are the result of perception experiments. However, since the aim of these QoE models is to accurately predict natural videos, these artificially distorted video databases are an insufficient basis for learning. Additionally, the small sizes make them only marginally usable for state-of-the-art learning systems, such as deep learning. In order to give a better basis for development and evaluation of objective VQA methods, we have created a larger datasets of natural, real-world video sequences with corresponding subjective mean opinion scores (MOS) gathered through crowdsourcing.\n\u200b\nWe took YFCC100m as a baseline databas", "imagePath": null, "iconName": "film", "color": "#F37668"}]</script> <script id="sota-page-details" type="application/json">{"task_main_area_name": "Time Series", "task_name": "Video Quality Assessment", "dataset_name": "KoNViD-1k", "description": "", "mirror_url": null, "has_competition_entries": false}</script> <script type="application/javascript"> let evaluationChartData = JSON.parse( document.getElementById("evaluation-chart-data").textContent ); let evaluationTableMetrics = JSON.parse( document.getElementById("evaluation-table-metrics").textContent ); let evaluationTableData = JSON.parse( document.getElementById("evaluation-table-data").textContent ); let communityChartData = JSON.parse( document.getElementById("community-chart-data").textContent ); let communityTableMetrics = JSON.parse( document.getElementById("community-table-metrics").textContent ); let communityTableData = JSON.parse( document.getElementById("community-table-data").textContent ); let datasetDetails = JSON.parse( document.getElementById("dataset-details").textContent ); let sotaPageDetails = JSON.parse( document.getElementById("sota-page-details").textContent ); // Containers let sotaPageContainer = document.getElementById("sota-page"); // Breadcrumbs let breadcrumbs = [ { title: "Browse", url: "/sota" }, { title: sotaPageDetails.task_main_area_name, url: "/area/time-series" }, { title: sotaPageDetails.task_name, url: "/task/video-quality-assessment" }, { title: sotaPageDetails.dataset_name + " dataset", url: "/dataset/konvid-1k" } ]; let highlight = ( null ); function datasetsSearchUrl(query) { return "/datasets?q="+encodeURIComponent(query); } function newDatasetUrl(datasetName) { return "/contribute/dataset/new?name="+encodeURIComponent(datasetName); } const SOTA_AUTOCOMPLETE_PAPER_URL = "/sota/autocomplete/paper"; const VIEW_PAPER_URL = "/paper/PAPER_SLUG"; </script> <!-- End SOTA Table Generation --> </div> <div class="footer"> <div class="footer-contact"> <span class="footer-contact-item">Contact us on:</span> <a class="footer-contact-item" href="mailto:hello@paperswithcode.com"> <span class=" icon-wrapper icon-ion" data-name="mail"><svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><path d="M424 80H88a56.06 56.06 0 0 0-56 56v240a56.06 56.06 0 0 0 56 56h336a56.06 
---

Contact: hello@paperswithcode.com. Papers With Code is a free resource with all data licensed under CC-BY-SA (https://creativecommons.org/licenses/by-sa/4.0/).