Image Processing: Algorithms and Systems XXI (IPAS)
This conference integrates theoretical research on image processing algorithms with the more applied research on image processing systems.

Keywords: Filtering and Denoising, Fusion Algorithms, Video Processing, Tools and Systems, Color

Part of the Electronic Imaging 2023 (EI 2023) symposium: http://www.imaging.org/IST/IST/Conferences/EI/EI2023/EI2023.aspx

Conference page: https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IPAS.aspx
margin: 0; padding: 0; } #menuv_NAV ul { position: relative; z-index: 597; float: left; } #menuv_NAV ul li { float: left; min-height: 1px; line-height: 1.5em; vertical-align: middle; } #menuv_NAV ul li.hover, #menuv_NAV ul li:hover { position: relative; z-index: 599; cursor: default; } #menuv_NAV ul ul { visibility: hidden; position: absolute; top: 100%; left: 0; z-index: 598; width: 100%; } #menuv_NAV ul ul li { float: none; } #menuv_NAV ul ul, #menuv_NAV ul ul ul { top: -2px; left: 99%; } #menuv_NAV ul li:hover > ul { visibility: visible; } #menuv_NAV ul li { float: none; } #menuv_NAV a { display: block; font-weight: 400 !important; } /* Custom CSS Styles */ #menuv_NAV { font-family: 'Jost', sans-serif; text-transform: uppercase; font-size: 13px; } #menuv_NAV:after, #menuv_NAV ul:after { content: ''; display: block; clear: both; } #menuv_NAV ul { background: #EEEEEE; border: 0px solid #aaaaaa; padding: 4px; width: 100%; } #menuv_NAV ul li { color: #0C0C0C; position: relative; } #menuv_NAV ul li.hover, #menuv_NAV ul li:hover { background: #cccccc; background: -moz-linear-gradient(#cccccc 0%, #cccccc100%); background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #9f9f9f), color-stop(100%, #cccccc )); background: -webkit-linear-gradient(#cccccc 0%, #cccccc 100%); background: linear-gradient(#cccccc 0%, #cccccc 100%); color: #FFF; } #menuv_NAV ul li.hover > a, #menuv_NAV ul li:hover > a { color: #000; border: 0px solid #cccccc; } #menuv_NAV ul ul { width: 650px; } #menuv_NAV a { border: 0px solid transparent; padding: 3px 10px; } #menuv_NAV a:link, #menuv_NAV a:visited { color: #0C0C0C; text-decoration: none; } #menuv_NAV a:hover { background: #cccccc; background: -moz-linear-gradient(#cccccc 0%, #cccccc 100%); background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #cccccc ), color-stop(100%, #cccccc )); background: -webkit-linear-gradient(#cccccc 0%, #cccccc 100%); background: linear-gradient(#cccccc 0%, #cccccc 100%); color: #FFF; } #menuv_NAV a:active { color: #ffa500; } #menuv_NAV .has-sub:hover > a:after, #menuv_NAV .has-sub.hover > a:after { border-color: transparent transparent transparent #FFF; } #menuv_NAV .has-sub > a:after { content: ''; width: 0px; height: 0px; border-style: solid; border-width: 0px 0px 0px 0px; border-color: transparent transparent transparent #808080; position: absolute; top: 50%; right: 5%; margin-top: -4px; -webkit-transform: rotate(360deg); } </style> <div id="menuv-container"> <div id="menuv_NAV"> <ul> <li> </li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/Attend___Register/IST/Conferences/EI/EI2023/Attend.aspx" target="_blank"><span style="color: #d2232a;">REGISTER</span></a></li> </ul> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=1#EntryCCO'">EI Home/About</a> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=1#EntryCCO">Home</a></li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=2#EntryCCO'">At-a-Glance</a></li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=3#EntryCCO">Awards</a> </li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=4#EntryCCO">EI History</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/TAB_Code_of_Conduct.aspx" target="_blank">Code of 
Conduct</a></li> <li><a href="http://www.imaging.org/IST/IST/About/Press_Releases.aspx" target="_blank">Press Releases</a> </li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx">Symposium Program</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx">EI Program</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=2#ProgramCCO">Symposium Plenary Speakers</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=3#ProgramCCO">EI Conferences</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=4#ProgramCCO">Conference Keynotes</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=5#ProgramCCO">Short Courses</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=6#ProgramCCO">Demonstration & Poster Sessions</a></li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=2#EntryCCO'">Program At-a-Glance</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=7#ProgramCCO">Author Index</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=5#ProgramCCO">Short Courses</a></li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=3#ProgramCCO" class="top_parent">Conferences</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=3#ProgramCCO">EI Conferences</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_3DMP.aspx">3D Imaging and Applications 2023 (3DIA)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_AVM.aspx">Autonomous Vehicles and Machines 2023 (AVM)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_COLOR.aspx">Color Imaging XXVIII: Displaying, Processing, Hardcopy, and Applications (COLOR)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_COIMG.aspx">Computational Imaging XXI (COIMG)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_CVAA.aspx">Computer Vision and Image Analysis of Art 2023 (CVAA)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_ERVR.aspx">Engineering Reality of Virtual Reality 2023 (ERVR)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_HPCI.aspx">High Performance Computing for Imaging 2023 (HPCI)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_HVEI.aspx">Human Vision and Electronic Imaging 2023 (HVEI)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IPAS.aspx">Image Processing: Algorithms and Systems XXI (IPAS)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IQSP.aspx">Image Quality and System Performance XX (IQSP)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IMAGE.aspx">Imaging and Multimedia Analytics at the Edge 2023 (IMAGE)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_ISS.aspx">Imaging Sensors and Systems 2023 (ISS)</a></li> 
<li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IRIACV.aspx">Intelligent Robotics and Industrial Applications using Computer Vision 2023 (IRIACV)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_MLSI.aspx">Machine Learning for Scientific Imaging 2023 (MLSI)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_MWSF.aspx">Media Watermarking, Security, and Forensics 2023 (MWSF)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_MOBMU.aspx">Mobile Devices and Multimedia: Enabling Technologies, Algorithms, and Applications 2023 (MOBMU)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_SDA.aspx">Stereoscopic Displays and Applications XXXIV (SD&A)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_VDA.aspx">Visualization and Data Analysis 2023 (VDA)</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=2#ProgramCCO">Symposium Plenary Speakers</a></li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx" class="top_parent">Author/Submit</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx?Author_Info=1">Submit How-to</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx?Author_Info=2">Accepted: Next Steps</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=6#ProgramCCO">Demonstration Session</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx?Author_Info=3">Publication FAQ</a></li> <li><a href="https://www.imaging.org/PDFS/Conferences/ElectronicImaging/EI_InvitationLetterRequest_Form_Fillable.pdf">Visas and Letters of Invitation</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/Attend___Register/IST/Conferences/EI/EI2023/Attend.aspx" class="top_parent">Attend/Register</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx">Registration & Fees</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx?Attendee_Information=2#Attendee_Information">Logistics</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx?Attendee_Information=3#Attendee_Information">Why Attend</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx?Attendee_Information=3#JustificationLetter.aspx">Justify Attendance</a> </li> <li><a href="https://www.imaging.org/PDFS/Conferences/ElectronicImaging/EI_InvitationLetterRequest_Form_Fillable.pdf">Visas and Letters of Invitation</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/ExhibitSponsor.aspx" class="top_parent">Exhibit/Sponsor</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/ExhibitSponsor.aspx">Exhibition & Sponsorship Opportunities</a> </li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/For_Students/IST/Conferences/EI/EI2023/For_Students.aspx" class="top_parent">For Students</a> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/For_Students/IST/Conferences/EI/EI2023/For_Students.aspx" class="top_parent">Student Focus</a></li> <li><a 
href="https://www.imaging.org/IST/Conferences/EI/EI2023/For_Students/IST/Conferences/EI/EI2023/For_Students.aspx?Student_Focus_Tabs=2#Student_Focus_Tabs">Student Showcase</a></li> </ul> </li> </ul> </div> <!-- end the menuv-container div --> </div> <!-- end the menuv div --></div><div class='ContentHtml'><div class="leftbox"> <table style="text-align: center; margin-left: auto; margin-right: auto;" width="100%" border="0"> <tbody> <tr> <td colspan="2" 7px;"valign="middle" align="center"> </td> </tr> <tr> <td style="height: 19px;" valign="top" align="right"> <!--- begin LinkedIn Share ---> <script src="https://platform.linkedin.com/in.js" type="text/javascript">lang: en_US</script> <script type="IN/Share" data-url="https://www.linkedin.com"></script> <!--- End LinkedIn Share ---> </td> <td style="height: 19px;" valign="top" align="left"> <!--- begin Twitter Share ---> <a href="https://twitter.com/share" class="twitter-share-button" data-count="none" data-hashtags="EI2023">Tweet</a> <script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');</script> <!--- end Twitter Share ---> </td> </tr> <tr> <td colspan="2" valign="middle" align="center"> <!--- begin Twitter Follow ---> <a href="https://twitter.com/ElectroImaging" class="twitter-follow-button" data-show-count="false">Follow @ElectroImaging</a> <script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');</script> <!--- end Twitter Follow ---><br> </td> </tr> </tbody> </table> </div> <br></div><div class='ContentHtml'><style> .greybox { background-color: #f2f2f2; text-align: center; } table.ImportantDates { color: #000000; margin-left: auto; margin-right: auto; border: 4px solid #f2f2f2; padding-top: 3px; padding-bottom: 3px; padding-left: 1px; padding-right: 1px; font-family: Jost, sans-serif; font-size: 10px; font-weight: 300; line-height: 1.1; vertical-align: top; } .ImpDateDescription{ padding-top: 3px; padding-bottom: 3px; text-align: left; font-weight: 400; vertical-align: top; } .ImpDateSubDescription{ padding-top: 3px; padding-bottom: 3px; text-indent: -6px; padding-left: 6px; text-align: left; font-weight: 400; color: #7f7f7f; vertical-align: top; } .impdatedate { padding-top: 3px; padding-bottom: 3px; text-align: center; font-weight: 400; color: #7f7f7f; font-family: Jost, sans-serif; vertical-align: top; text-align: center;" } .impdatedatesub { padding-top: 3px; padding-bottom: 3px; text-align: center; font-weight: 400; color: #7f7f7f; font-family: Jost, sans-serif; vertical-align: top; text-align: center;" } </style> <div class="leftbox"> <div class="greybox"> <a name="Deadlines" id="Deadlines"></a> <table class="ImportantDates" align="center"> <thead> <tr> <td colspan="2" style="text-align: center; white-space: nowrap;"><span style="font-weight: 500; font-size: 14px; font-family: Jost, sans-serif; color: #d2232a;">IMPORTANT DATES<br /> </span> <span style="font-size: 10px; color: #d2232a;"><em>Dates currently being confirmed; check back.</em></span> </td> </tr> <tr> <td colspan="2"> </td> </tr> </thead> <tbody> <tr> <td class="ImpDateDescription"> <br /> </td> <td 
class="impdate"><span style="font-weight: 500;">2022</span></td> </tr> <tr> <td class="ImpDateDescription">Call for Papers Announced</td> <td class="impdate">2 May</td> </tr> <tr> <td class="ImpDateDescription">Journal-first (JIST/JPI) Submissions <br /> </td> <td class="impdate"><br /> </td> </tr> <tr> <td class="ImpDateSubDescription">∙ Submission site Opens</td> <td class="impdatedatesub">2 May </td> </tr> <tr> <td class="ImpDateSubDescription">∙ Journal-first (JIST/JPI) Submissions Due</td> <td class="impdatedatesub">1 Aug</td> </tr> <tr> <td class="ImpDateSubDescription">∙ Final Journal-first manuscripts due</td> <td class="impdatedatesub">28 Oct</td> </tr> <tr> <td class="ImpDateDescription">Conference Papers Submissions</td> <td class="impdate"><br /> </td> </tr> <tr> <td class="ImpDateSubDescription">∙ Abstract Submission Opens</td> <td class="impdatedatesub">1 June</td> </tr> <tr> <td class="ImpDateSubDescription">∙ Priority Decision Submission Ends</td> <td class="impdatedatesub">15 July</td> </tr> <tr> <td class="ImpDateSubDescription">∙ Extended Submission Ends</td> <td class="impdatedatesub"> 19 Sept</td> </tr> <tr> <td class="ImpDateSubDescription">∙ FastTrack Conference Proceedings Manuscripts Due</td> <td class="impdatedatesub">25 Dec </td> </tr> <tr> <td class="ImpDateSubDescription">∙ All Outstanding Proceedings Manuscripts Due<br /> </td> <td class="impdatedatesub" style="white-space: nowrap;"> 6 Feb 2023</td> </tr> <tr> <td class="ImpDateDescription">Registration Opens</td> <td class="impdate" style="white-space: nowrap;">1 Dec</td> </tr> <tr> <td class="ImpDateDescription">Demonstration Applications Due</td> <td class="impdate">19 Dec</td> </tr> <tr> <td class="ImpDateDescription">Early Registration Ends</td> <td class="impdate">18 Dec</td> </tr> <tr> <td class="ImpDateDescription"><br /> </td> <td class="impdate" style="text-align: center;"><br /> <span style="font-weight: 500;">2023</span></td> </tr> <tr> <td class="ImpDateDescription">Hotel Reservation Deadline</td> <td class="impdate">6 Jan</td> </tr> <tr> <td class="ImpDateDescription">Symposium begins<br /> </td> <td class="impdate">15 Jan<br /> </td> </tr> <tr> <td class="ImpDateDescription"><br /> </td> <td class="impdate"><br /> </td> </tr> </tbody> </table> </div> </div></div></span> <div translate="yes"> </div> </div> </div> </div> </div><div id="ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_downloadContainer" style="display:none;"> <input type="hidden" name="ctl01$TemplateBody$WebPartManager1$gwpciCornerArt$ciCornerArt$HiddenDownloadPathField" id="ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_HiddenDownloadPathField" /><input type="submit" name="ctl01$TemplateBody$WebPartManager1$gwpciCornerArt$ciCornerArt$downloadButton" value="Download Path" id="ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_downloadButton" style="display:none" /> </div></div> </div> <div class="iMIS-WebPart"> <div id="ste_container_ciSponsors_e01b3063842f4cd9a411c84775f8643c" class="ContentItemContainer"><div id="ctl01_TemplateBody_WebPartManager1_gwpciSponsors_e01b3063842f4cd9a411c84775f8643c_ciSponsors_e01b3063842f4cd9a411c84775f8643c_Panel_Sponsors"> </div></div> </div> </div> <div id="ctl01_TemplateBody_ContentPage1_downloadContainer" style="display:none;"> <input type="hidden" name="ctl01$TemplateBody$ContentPage1$HiddenDownloadPathField" id="ctl01_TemplateBody_ContentPage1_HiddenDownloadPathField" /><input type="submit" name="ctl01$TemplateBody$ContentPage1$downloadButton" 
value="Download Path" id="ctl01_TemplateBody_ContentPage1_downloadButton" style="display:none" /> </div></div> </div> <div class="col-sm-9"> <div class="ContentItemContainer"> <div id="WebPartZone2_Page1" class="WebPartZone "> <div class="iMIS-WebPart"> <div id="ste_container_ciConfCCO" class="ContentItemContainer"><div class="panel "> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO__Head" class="panel-heading"> </div><div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO__BodyContainer" class="panel-body-container"> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO__Body" class="panel-body"> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_MainContentControl" class="cco tabs-wrapper tabs-horizontal tabs-top"> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top" class="RadTabStrip RadTabStrip_MetroTouch RadTabStripTop_MetroTouch RadTabStripTop RadTabStripTop_MetroTouch_Baseline"> <div class="rtsLevel rtsLevel1"> <ul class="rtsUL"><li class="rtsLI rtsFirst"><a class="rtsLink rtsBefore" href="#"><span class="rtsOut"><span class="rtsIn"><span class="rtsTxt">About IPAS 2023</span></span></span></a></li><li class="rtsLI"><a class="rtsLink rtsSelected" href="#"><span class="rtsOut"><span class="rtsIn"><span class="rtsTxt">IPAS Program</span></span></span></a></li><li class="rtsLI"><a class="rtsLink rtsAfter" href="#"><span class="rtsOut"><span class="rtsIn"><span class="rtsTxt">For IPAS Authors</span></span></span></a></li><li class="rtsLI rtsLast"><a class="rtsLink" href="#"><span class="rtsOut"><span class="rtsIn"><span class="rtsTxt">IPAS History/Proceedings</span></span></span></a></li></ul> </div><input id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top_ClientState" name="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top_ClientState" type="hidden" /> </div> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage" class="RadMultiPage RadMultiPage_Default"> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_1" class="rmpView rmpHidden"> <div class="ContentTabbedDisplay AddPadding"> <p class="AsiWarning">No content found</p> </div> </div><div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_2" class="rmpView"> <div class="ContentWizardDisplay ClearFix"><div> <div class="row"> <div class="col-sm-12"> <div id="ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Zone1PlaceHolder" class="WebPartZone"> <div id="ste_container_ConferenceHeading" class="ContentItemContainer"><style type="text/css"> /*this is the all-purpose callout. It is behind the keynotes It is grey*/ .callout{ background-color:#f2f2f2; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; } /*this is the callout for panels and special events. It is yellow*/ .coloredcallout{ background-color: #fff9e6; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; } /*this is the plenary callout. 
Image Processing: Algorithms and Systems XXI

Monday 16 January 2023

10:20 – 10:50 AM Coffee Break
12:30 – 2:00 PM Lunch

Monday 16 January PLENARY: Neural Operators for Solving PDEs
Session Chair: Robin Jenkin, NVIDIA Corporation (United States)
2:00 – 3:00 PM
Cyril Magnin I/II/III

Deep learning surrogate models have shown promise in modeling complex physical phenomena such as fluid flows, molecular dynamics, and material properties. However, standard neural networks assume finite-dimensional inputs and outputs and hence cannot withstand a change in resolution or discretization between training and testing. We introduce Fourier neural operators, which can learn operators: mappings between infinite-dimensional function spaces. They are independent of the resolution or grid of the training data and allow zero-shot generalization to higher-resolution evaluations. When applied to weather forecasting, neural operators capture fine-scale phenomena and have skill similar to gold-standard numerical weather models for predictions up to a week or longer, while being four to five orders of magnitude faster.

Anima Anandkumar, Bren professor, California Institute of Technology, and senior director of AI Research, NVIDIA Corporation (United States)

Anima Anandkumar is a Bren Professor at Caltech and Senior Director of AI Research at NVIDIA. She is passionate about designing principled AI algorithms and applying them to interdisciplinary domains. She has received several honors, including an IEEE Fellowship, an Alfred P. Sloan Fellowship, an NSF CAREER Award, and faculty fellowships from Microsoft, Google, Facebook, and Adobe. She is part of the World Economic Forum's Expert Network. Anandkumar received her BTech from the Indian Institute of Technology Madras and her PhD from Cornell University, did postdoctoral research at MIT, and was an assistant professor at the University of California, Irvine.
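To make the resolution-independence claim concrete, here is a minimal sketch of the spectral-convolution layer at the heart of a Fourier neural operator, assuming PyTorch; the layer shape, mode count, and usage below are illustrative assumptions, not the plenary's actual implementation.

```python
# Hypothetical sketch of one Fourier-neural-operator layer (1D), assuming PyTorch.
# The learned weights act only on a fixed number of low Fourier modes, so the
# layer is defined on functions rather than on a fixed grid: the same weights
# apply to inputs sampled at any resolution.
import torch
import torch.nn as nn

class SpectralConv1d(nn.Module):
    def __init__(self, in_ch: int, out_ch: int, n_modes: int):
        super().__init__()
        self.n_modes = n_modes  # number of low-frequency Fourier modes to keep
        scale = 1.0 / (in_ch * out_ch)
        self.weight = nn.Parameter(
            scale * torch.randn(in_ch, out_ch, n_modes, dtype=torch.cfloat))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, in_ch, n_grid), sampled on any uniform grid
        x_hat = torch.fft.rfft(x)                       # to Fourier space
        out_hat = torch.zeros(x.size(0), self.weight.size(1), x_hat.size(-1),
                              dtype=torch.cfloat, device=x.device)
        k = min(self.n_modes, x_hat.size(-1))
        # per-mode linear mixing of channels on the retained low modes
        out_hat[:, :, :k] = torch.einsum(
            'bik,iok->bok', x_hat[:, :, :k], self.weight[:, :, :k])
        return torch.fft.irfft(out_hat, n=x.size(-1))   # back to the grid

# The same weights evaluate on a coarse or a fine grid without retraining:
layer = SpectralConv1d(in_ch=1, out_ch=1, n_modes=16)
coarse = layer(torch.randn(4, 1, 64))    # 64-point grid
fine = layer(torch.randn(4, 1, 256))     # 256-point grid, same weights
```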
3:00 – 3:30 PM Coffee Break

EI 2023 Highlights Session
Session Chair: Robin Jenkin, NVIDIA Corporation (United States)
3:30 – 5:00 PM
Cyril Magnin II

Join us for a session that celebrates the breadth of what EI has to offer, with short papers selected from EI conferences.

NOTE: The EI-wide "EI 2023 Highlights" session is concurrent with the Monday afternoon COIMG, COLOR, IMAGE, and IQSP conference sessions.

IQSP-309: Evaluation of image quality metrics designed for DRI tasks with automotive cameras
Valentine Klein, Yiqi Li, Claudio Greco, Laurent Chanas, and Frédéric Guichard, DXOMARK (France)

Driving assistance is increasingly used in new car models. Most driving assistance systems are based on automotive cameras and computer vision. Computer vision, regardless of the underlying algorithms and technology, requires the images to have good image quality, defined according to the task. This notion of good image quality is still to be defined for computer vision, as its criteria differ greatly from those of human vision: humans have a better contrast detection ability than imaging chains. The aim of this article is to compare three metrics designed for the detection of objects with computer vision: the Contrast Detection Probability (CDP) [1, 2, 3, 4], the Contrast Signal to Noise Ratio (CSNR) [5], and the Frequency of Correct Resolution (FCR) [6]. The computer vision task of reading the characters on a license plate is used as a benchmark, and the objective is to check the correlation between each objective metric and the ability of a neural network to perform this task. A protocol to test these metrics and compare them to the output of the neural network was designed, and the pros and cons of each of the three metrics are noted.
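Of the three metrics, CSNR is the simplest to state; a sketch assuming the common "contrast over pooled noise" form, which may differ in detail from the definition used in [5]:

```python
# Hypothetical sketch: a contrast signal-to-noise ratio between two image
# patches, assuming the common "mean level difference over pooled noise std"
# definition; the paper's exact CSNR definition may differ.
import numpy as np

def csnr(patch_a: np.ndarray, patch_b: np.ndarray) -> float:
    """Contrast (mean level difference) divided by pooled noise std."""
    contrast = abs(patch_a.mean() - patch_b.mean())
    noise = np.sqrt(0.5 * (patch_a.var() + patch_b.var()))
    return contrast / noise

# e.g. a license-plate character patch vs. the plate background:
rng = np.random.default_rng(0)
char = rng.normal(80, 5, (16, 16))    # darker character pixels
bg = rng.normal(140, 5, (16, 16))     # lighter plate background
print(csnr(char, bg))                 # ~12: high contrast relative to noise
```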
SD&A-224: Human performance using stereo 3D in a helmet mounted display and association with individual stereo acuity
Bonnie Posselt, RAF Centre of Aviation Medicine (United Kingdom)

Binocular Helmet Mounted Displays (HMDs) are a critical part of the aircraft system, allowing information to be presented to the aviator with stereoscopic 3D (S3D) depth, potentially enhancing situational awareness and improving performance. The utility of S3D in an HMD may be linked to an individual's ability to perceive changes in binocular disparity (stereo acuity). Though minimum stereo acuity standards exist for most military aviators, current test methods may be unable to characterise this relationship. This presentation investigates the effect of S3D on performance when used in a warning alert displayed in an HMD. Furthermore, any effect on performance, ocular symptoms, and cognitive workload is evaluated with regard to individual stereo acuity measured with a variety of paper-based and digital stereo tests.

IMAGE-281: Smartphone-enabled point-of-care blood hemoglobin testing with color accuracy-assisted spectral learning
Sang Mok Park¹, Yuhyun Ji¹, Semin Kwon¹, Andrew R. O'Brien², Ying Wang², and Young L. Kim¹; ¹Purdue University and ²Indiana University School of Medicine (United States)

We develop an mHealth technology for noninvasively measuring blood Hgb levels in patients with sickle cell anemia, using photos of peripheral tissue acquired by the built-in camera of a smartphone. As an easily accessible sensing site, the inner eyelid (i.e., palpebral conjunctiva) is used because of its relatively uniform microvasculature and the absence of skin pigments. Color correction (color reproduction) and spectral learning (spectral super-resolution spectroscopy) algorithms are integrated for accurate and precise mHealth blood Hgb testing. First, color correction using a color reference chart with multiple color patches extracts absolute color information of the inner eyelid, compensating for smartphone models, ambient light conditions, and data formats during photo acquisition. Second, spectral learning virtually transforms the smartphone camera into a hyperspectral imaging system, mathematically reconstructing high-resolution spectra from color-corrected eyelid images. Third, the color correction and spectral learning algorithms are combined with a spectroscopic model for blood Hgb quantification among sickle cell patients. Importantly, single-shot photo acquisition of the inner eyelid using the color reference chart allows straightforward, real-time, and instantaneous reading of blood Hgb levels. Overall, our mHealth blood Hgb tests could potentially be scalable, robust, and sustainable in resource-limited and homecare settings.
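The chart-based color correction step described above is, in outline, a regression from measured patch colors to known reference values; a minimal sketch assuming a linear (affine) model fitted by least squares, which is one standard approach rather than the authors' exact algorithm:

```python
# Hypothetical sketch of chart-based color correction: fit an affine map from
# the RGB values the phone measured on the reference chart's patches to the
# chart's known reference values, then apply it to eyelid pixels. The authors'
# actual pipeline (and any nonlinearity handling) may differ.
import numpy as np

def fit_color_correction(measured: np.ndarray, reference: np.ndarray) -> np.ndarray:
    """measured, reference: (n_patches, 3). Returns a (3, 4) affine matrix."""
    ones = np.ones((measured.shape[0], 1))
    A = np.hstack([measured, ones])              # (n, 4) homogeneous inputs
    M, *_ = np.linalg.lstsq(A, reference, rcond=None)
    return M.T                                   # (3, 4)

def apply_color_correction(M: np.ndarray, rgb: np.ndarray) -> np.ndarray:
    ones = np.ones((rgb.shape[0], 1))
    return np.hstack([rgb, ones]) @ M.T          # (n, 3) corrected colors

# measured chart patches under unknown lighting -> known reference values
measured = np.random.rand(24, 3)                 # e.g. a 24-patch chart
reference = np.random.rand(24, 3)
M = fit_color_correction(measured, reference)
corrected = apply_color_correction(M, measured)  # best affine fit to reference
```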
AVM-118: Designing scenes to quantify the performance of automotive perception systems
Zhenyi Liu¹, Devesh Shah², Alireza Rahimpour², Joyce Farrell¹, and Brian Wandell¹; ¹Stanford University and ²Ford Motor Company (United States)

We implemented an end-to-end simulation for camera-based perception systems used in automotive applications. The open-source software creates complex driving scenes and simulates the cameras that acquire images of these scenes. The camera images are then used by a neural network in the perception system to identify the locations of scene objects, providing the results as input to the decision system. In this paper, we design collections of test scenes that can be used to quantify the perception system's performance under a range of (a) environmental conditions (object distance, occlusion ratio, lighting levels) and (b) camera parameters (pixel size, lens type, color filter array). We are designing scene collections to analyze performance for detecting vehicles, traffic signs, and vulnerable road users in a range of environmental conditions and for a range of camera parameters. With experience, such scene collections may serve a role similar to that of the standardized test targets used to quantify camera image quality (e.g., acuity, color).
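Since the abstract enumerates its factors explicitly, scene-collection design can be pictured as a Cartesian product over those factors; a small illustrative sketch (the specific values, and the flat-grid design itself, are assumptions, not the paper's actual collections):

```python
# Hypothetical sketch of a scene-collection grid over the environmental and
# camera factors the abstract lists; the values are illustrative.
from itertools import product

environment = {
    "object_distance_m": [10, 25, 50, 100],
    "occlusion_ratio": [0.0, 0.25, 0.5],
    "lighting_lux": [10, 100, 1000, 10000],
}
camera = {
    "pixel_size_um": [1.4, 2.0, 3.0],
    "lens": ["wide", "standard"],
    "cfa": ["RGGB", "RCCB"],
}
factors = {**environment, **camera}
scenes = [dict(zip(factors, combo)) for combo in product(*factors.values())]
print(len(scenes))  # 4*3*4 * 3*2*2 = 576 test scenes in the collection
```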
VDA-403: Visualizing and monitoring the process of injection molding
Christian A. Steinparz¹, Thomas Mitterlehner², Bernhard Praher², Klaus Straka¹,², Holger Stitz¹,³, and Marc Streit¹,³; ¹Johannes Kepler University, ²Moldsonics GmbH, and ³datavisyn GmbH (Austria)

In injection molding machines, the molds are rarely equipped with sensor systems. The availability of non-invasive, ultrasound-based in-mold sensors provides better means for guiding operators of injection molding machines throughout the production process. However, existing visualizations are mostly limited to plots of temperature and pressure over time. In this work, we present the result of a design study created in collaboration with domain experts. The resulting prototypical application uses real-world data taken from live ultrasound sensor measurements of injection molding cavities captured over multiple cycles during the injection process. Our contribution includes a definition of tasks for setting up and monitoring the machines during the process, and a corresponding web-based visual analysis tool addressing these tasks. The interface consists of a multi-view display with various levels of data aggregation that is updated live with newly streamed data from ongoing injection cycles.
COIMG-155: Commissioning the James Webb Space Telescope
Joseph M. Howard, NASA Goddard Space Flight Center (United States)

Astronomy is arguably in a golden age, where current and future NASA space telescopes are expected to contribute to this rapid growth in our understanding of the universe. The most recent addition to our space-based telescopes dedicated to astronomy and astrophysics is the James Webb Space Telescope (JWST), which launched on 25 December 2021. This talk discusses the first six months in space for JWST, which were spent commissioning the observatory with many deployments, alignments, and system and instrumentation checks. These engineering activities help verify the proper working of the telescope prior to commencing full science operations. (For the session Computational Imaging using Fourier Ptychography and Phase Retrieval.)

HVEI-223: Critical flicker frequency (CFF) at high luminance levels
Alexandre Chapiro¹, Nathan Matsuda¹, Maliha Ashraf², and Rafal Mantiuk³; ¹Meta (United States), ²University of Liverpool (United Kingdom), and ³University of Cambridge (United Kingdom)

The critical flicker fusion (CFF) is the frequency of changes at which a temporally periodic light begins to appear completely steady to an observer. This value is affected by several visual factors, such as the luminance of the stimulus or its location on the retina. With new high dynamic range (HDR) displays operating at higher luminance levels, and virtual reality (VR) displays presenting at wide fields of view, the effective CFF may change significantly from the values expected for traditional presentation. In this work we use a prototype HDR VR display capable of luminances up to 20,000 cd/m² to gather a novel set of CFF measurements at never-before-examined levels of luminance, eccentricity, and size. Our data are useful for studying the temporal behavior of the visual system at high luminance levels, as well as for setting useful thresholds for display engineering.
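For reference, the classical baseline for CFF's dependence on luminance is the Ferry-Porter law, stated below; whether such a linear-in-log-luminance fit continues to hold at these extreme luminances is exactly what measurements like these probe, so this is a reader's aid, not the paper's fitted model.

```latex
% Ferry--Porter law: CFF rises linearly with the logarithm of luminance L;
% a and b are empirical constants that vary with eccentricity and stimulus
% size. Classical baseline only, not the model fitted in HVEI-223.
\mathrm{CFF}(L) = a + b \log_{10} L
```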
HPCI-228: Physics guided machine learning for image-based material decomposition of tissues from simulated breast models with calcifications
Muralikrishnan Gopalakrishnan Meena¹, Amir K. Ziabari¹, Singanallur Venkatakrishnan¹, Isaac R. Lyngaas¹, Matthew R. Norman¹, Balint Joo¹, Thomas L. Beck¹, Charles A. Bouman², Anuj Kapadia¹, and Xiao Wang¹; ¹Oak Ridge National Laboratory and ²Purdue University (United States)

Material decomposition of computed tomography (CT) scans using projection-based approaches, while highly accurate, poses a challenge for medical imaging researchers and clinicians due to limited or no access to projection data. We introduce a deep learning image-based material decomposition method that is guided by physics and requires no access to projection data. The method is demonstrated by decomposing tissues from simulated dual-energy X-ray CT scans of virtual human phantoms containing four materials: adipose, fibroglandular, calcification, and air. The method uses a hybrid unsupervised and supervised learning technique to tackle the material decomposition problem. We take advantage of the unique X-ray absorption rate of calcium compared to body tissues to perform a preliminary segmentation of calcification from the images using unsupervised learning. We then perform supervised material decomposition using a deep-learned U-Net model trained on GPUs in the high-performance systems at the Oak Ridge Leadership Computing Facility. The method is demonstrated on simulated breast models to decompose calcification, adipose, fibroglandular, and air.
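The unsupervised first stage exploits calcium's distinctive X-ray absorption; a toy sketch assuming simple dual-energy attenuation-ratio thresholding (the ratio test and threshold value are illustrative assumptions; the paper's unsupervised method may differ):

```python
# Hypothetical sketch of the unsupervised first stage: calcium attenuates
# relatively more at low X-ray energies than soft tissue does, so the
# low/high-energy attenuation ratio separates calcifications from adipose
# and fibroglandular tissue. The threshold is an illustrative assumption.
import numpy as np

def segment_calcification(low_kev: np.ndarray, high_kev: np.ndarray,
                          ratio_threshold: float = 1.5) -> np.ndarray:
    """Return a boolean mask of likely calcification pixels."""
    eps = 1e-6
    ratio = low_kev / (high_kev + eps)   # dual-energy attenuation ratio
    return ratio > ratio_threshold       # calcium: high ratio vs. soft tissue
```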
3DIA-104: Layered view synthesis for general images
Loïc Dehan, Wiebe Van Ranst, and Patrick Vandewalle, Katholieke Universiteit Leuven (Belgium)

We describe a novel method for monocular view synthesis. The goal of our work is to create a visually pleasing set of horizontally spaced views based on a single image, with applications in view synthesis for virtual reality and glasses-free 3D displays. Previous methods produce realistic results on images that show a clear distinction between a foreground object and the background. We aim to create novel views in more general, crowded scenes in which there is no such clear distinction. Our main contribution is a computationally efficient method for realistic occlusion inpainting and blending, especially in complex scenes. Our method can be effectively applied to any image, which we show both qualitatively and quantitatively on a large dataset of stereo images. It performs natural disocclusion inpainting and maintains the shape and edge quality of foreground objects.

ISS-329: A self-powered asynchronous image sensor with independent in-pixel harvesting and sensing operations
Ruben Gomez-Merchan, Juan Antonio Leñero-Bardallo, and Ángel Rodríguez-Vázquez, University of Seville (Spain)

A new self-powered asynchronous sensor with a novel pixel architecture is presented. Pixels are autonomous and can harvest energy or sense independently. During image acquisition, pixels toggle to a harvesting operation mode once they have sensed their local illumination level. With the proposed pixel architecture, the most illuminated pixels provide an early contribution to powering the sensor, while poorly illuminated ones spend more time sensing their local illumination. The equivalent frame rate is thus higher than that offered by conventional self-powered sensors, which harvest and sense illumination in independent phases. The proposed sensor uses a Time-to-First-Spike readout that allows trading between image quality and data and bandwidth consumption. The sensor offers HDR operation with a dynamic range of 80 dB. Pixel power consumption is only 70 pW. In the article, we describe the sensor and pixel architectures in detail, provide and discuss experimental results, and benchmark the sensor specifications against the state of the art.
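A Time-to-First-Spike readout can be summarized in a few lines: each pixel fires once, when its integrated charge crosses a threshold, so spike time is inversely proportional to illuminance and bright pixels report first; a toy sketch with illustrative constants (not the sensor's actual circuit behavior):

```python
# Hypothetical sketch of a Time-to-First-Spike (TTFS) readout: each pixel
# spikes when its integrated photocurrent crosses a threshold, so bright
# pixels spike early. Cutting acquisition off at t_max trades shadow detail
# for bandwidth, as the abstract describes. Constants are illustrative.
import numpy as np

def ttfs_readout(illuminance: np.ndarray, q_threshold: float = 1.0,
                 t_max: float = 1.0) -> np.ndarray:
    """Spike time per pixel: t = Q_th / I, clipped to the acquisition window."""
    t_spike = q_threshold / np.maximum(illuminance, 1e-9)
    return np.minimum(t_spike, t_max)   # dimmest pixels never spike in-window

def reconstruct(t_spike: np.ndarray, q_threshold: float = 1.0) -> np.ndarray:
    """Invert spike times back to an intensity estimate (clipped in shadows)."""
    return q_threshold / t_spike

scene = np.random.default_rng(1).uniform(0.01, 100.0, (4, 4))  # ~80 dB range
image = reconstruct(ttfs_readout(scene))
```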
<p class="presentation_time" style="text-align:left;"><span>COLOR-184</span> <br> <span class="presentation_title">Color blindness and modern board games, </span><span class="author_string">Alessandro Rizzi<sup>1</sup> and Matteo Sassi<sup>2</sup>; <sup>1</sup>Università degli Studi di Milano and <sup>2</sup>consultant (Italy)</span></p> <p class="abstract">The board game industry is experiencing a strong renewal of interest: in the last few years, about 4,000 new board games have been designed and distributed each year. The gender balance among board game players is approaching parity, though males remain a slight majority. This means that (at least) around 10% of board game players are color blind. How does the board game industry deal with this? Awareness has recently begun to rise in board game design, but a large gap remains compared with, for example, the computer game industry. This paper presents data on the current situation and discusses exemplary cases of successful board games.</p> <p> </p> <p class="event_time">5:00 – 6:15 PM EI 2023 All-Conference Welcome Reception (in the Cyril Magnin Foyer)</p> <p class="date">Tuesday 17 January 2023</p> <p class="event_time">10:00 AM – 7:30 PM Industry Exhibition - Tuesday (in the Cyril Magnin Foyer)</p> <p class="event_time">10:20 – 10:50 AM Coffee Break</p> <p class="event_time">12:30 – 2:00 PM Lunch</p> <div class="pinkcallout"> <p class="session_title">Tuesday 17 January PLENARY: Embedded Gain Maps for Adaptive Display of High Dynamic Range Images</p> <span class="chair">Session Chair: Robin Jenkin, NVIDIA Corporation (United States)<br> </span> <span class="session_time">2:00 PM – 3:00 PM</span> <br> <span class="room">Cyril Magnin I/II/III<br> </span> <p class="session_notes">Images optimized for high dynamic range (HDR) displays have brighter highlights and more detailed shadows, resulting in an increased sense of realism and greater impact. However, a major issue with HDR content is the lack of consistency in appearance across different devices and viewing environments. There are several reasons, including the varying capabilities of HDR displays and the different tone mapping methods implemented across software and platforms. Consequently, HDR content authors can neither control nor predict how their images will appear in other apps.</p> <p class="session_notes">We present a flexible system that provides consistent and adaptive display of HDR images. Conceptually, the method combines both SDR and HDR renditions within a single image and interpolates between the two dynamically at display time. We compute a Gain Map that represents the difference between the two renditions. In the file, we store a Base rendition (either SDR or HDR), the Gain Map, and some associated metadata. At display time, we combine the Base image with a scaled version of the Gain Map, where the scale factor depends on the image metadata, the HDR capacity of the display, and the viewing environment. (A minimal sketch of this display-time combination follows the session description.)</p> <span class="author_string"><strong>Eric Chan, </strong>Fellow, Adobe Inc. (United States)</span> <p class="session_notes">Eric Chan is a Fellow at Adobe, where he develops software for editing photographs. Current projects include Photoshop, Lightroom, Camera Raw, and Digital Negative (DNG). When not writing software, Chan enjoys spending time at his other keyboard, the piano. He is an enthusiastic nature photographer and often combines his photo activities with travel and hiking.</p> <span class="author_string"><strong>Paul M. Hubel, </strong>director of Image Quality in Software Engineering, Apple Inc. (United States)</span> <p class="session_notes">Paul M. Hubel is director of Image Quality in Software Engineering at Apple. He has worked on computational photography and the image quality of photographic systems for many years, covering all aspects of the imaging chain, particularly for iPhone. He trained in optical engineering at the University of Rochester, Oxford University, and MIT, and has more than 50 patents on color imaging and camera technology. Hubel is active on the ISO TC42 committee (Digital Photography), where this work is under discussion, and is currently a VP on the IS&T Board. Outside work he enjoys photography, travel, cycling, and coffee roasting, and plays trumpet in several Bay Area ensembles.</p> </div>
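<p class="session_notes">A minimal sketch of the display-time combination described in the plenary abstract, under the common formulation in which the gain map is applied in log space with a weight derived from the display's HDR headroom. The variable names, metadata fields, and weight formula are assumptions for illustration, not Adobe's published implementation.</p>
<pre><code>import numpy as np

# Base rendition (linear RGB), per-pixel gain map G (HDR/SDR ratio), and
# assumed metadata giving the log2 headroom range the map was authored for.
def display_hdr_image(base_linear, gain_map, meta, display_headroom):
    """Interpolate between SDR and HDR renditions at display time."""
    lo, hi = meta["hdr_capacity_min"], meta["hdr_capacity_max"]  # in stops
    h = np.log2(display_headroom)
    w = np.clip((h - lo) / (hi - lo), 0.0, 1.0)  # 0 = pure SDR, 1 = full HDR
    return base_linear * np.exp2(np.log2(gain_map) * w)

rng = np.random.default_rng(2)
base = rng.uniform(0.05, 1.0, size=(8, 8, 3))   # SDR base rendition
gain = rng.uniform(1.0, 4.0, size=(8, 8, 1))    # up to two stops of headroom
meta = {"hdr_capacity_min": 0.0, "hdr_capacity_max": 2.0}

sdr_out = display_hdr_image(base, gain, meta, display_headroom=1.0)  # w = 0
hdr_out = display_hdr_image(base, gain, meta, display_headroom=4.0)  # w = 1
print(float(sdr_out.max()), float(hdr_out.max()))
</code></pre>
<p class="session_notes">Intermediate headrooms give a smooth interpolation between the two renditions, which is the consistency property the talk emphasizes.</p>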
<p class="event_time">3:00 – 3:30 PM Coffee Break</p> <p class="event_time">5:30 – 7:00 PM EI 2023 Symposium Demonstration Session (in the Cyril Magnin Foyer)</p> <p class="date">Wednesday 18 January 2023</p> <p class="event_time">10:00 AM – 3:30 PM Industry Exhibition - Wednesday (in the Cyril Magnin Foyer)</p> <p class="event_time">10:20 – 10:50 AM Coffee Break</p> <p class="event_time">12:30 – 2:00 PM Lunch</p> <div class="pinkcallout"> <p class="session_title">Wednesday 18 January PLENARY: Bringing Vision Science to Electronic Imaging: The Pyramid of Visibility</p> <span class="chair">Session Chair: Andreas Savakis, Rochester Institute of Technology (United States)<br> </span> <span class="session_time">2:00 PM – 3:00 PM</span> <br> <span class="room">Cyril Magnin I/II/III<br> </span> <p class="session_notes">Electronic imaging depends fundamentally on the capabilities and limitations of human vision. The challenge for the vision scientist is to describe these limitations to the engineer in a comprehensive, computable, and elegant formulation. Primary among these limitations are the visibility of variations in light intensity over space and time, of variations in color over space and time, and of all of these patterns with position in the visual field. Lastly, we must describe how all these sensitivities vary with adapting light level. We have recently developed a structural description of human visual sensitivity, which we call the Pyramid of Visibility, that accomplishes this synthesis. This talk shows how this structure accommodates all the dimensions described above, and how it can be used to solve a wide variety of problems in display engineering. (A sketch of the pyramid's linear form follows the session description.)</p> <span class="author_string"><strong>Andrew B. Watson, </strong>chief vision scientist, Apple Inc. (United States)</span> <p class="session_notes">Andrew Watson is chief vision scientist at Apple, where he leads the application of vision science to technologies, applications, and displays. His research focuses on computational models of early vision. He is the author of more than 100 scientific papers and 8 patents. He has 21,180 citations and an h-index of 63. Watson founded the Journal of Vision and served as editor-in-chief 2001-2013 and 2018-2022. He has received numerous awards, including the Presidential Rank Award from the President of the United States.</p> </div>
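<p class="session_notes">As a rough illustration of the structure described above: in the published Pyramid of Visibility work, log contrast sensitivity is approximated as a linear function of spatial frequency, temporal frequency, and log luminance. The sketch below shows only that functional form; the coefficients are made-up placeholders, not fitted values.</p>
<pre><code>import numpy as np

# Pyramid of Visibility (illustrative form): log10 sensitivity falls off
# linearly with spatial and temporal frequency and rises with log luminance.
# Coefficients below are placeholders, not values from the literature.
def log_sensitivity(f_spatial, f_temporal, luminance,
                    c0=2.0, c_s=0.05, c_t=0.03, c_l=0.4):
    return c0 - c_s * f_spatial - c_t * f_temporal + c_l * np.log10(luminance)

# Example: a 4 cycle/deg, 8 Hz pattern viewed at 100 cd/m^2
s = 10 ** log_sensitivity(4.0, 8.0, 100.0)
print(f"predicted contrast sensitivity: {s:.1f}")
</code></pre>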
<p class="event_time">3:00 – 3:30 PM Coffee Break</p> <div class="callout"> <p class="session_title">KEYNOTE: Systematic Data Labeling (W3.1)</p> <span class="chair">Session Chairs: Karen Egiazarian, Tampere University (Finland) and Atanas Gotchev, Tampere University (Finland)<br> </span><span class="session_time">3:30 – 4:15 PM</span> <br> <span class="room">Cyril Magnin III</span> <br> <p class="presentation_time" style="text-align:left;">3:30<br> <span class="presentation_title">Conference Welcome</span> </p> <p class="presentation_time" style="text-align:left;">3:35<a name="IPAS-284"></a><span style="float: right;">IPAS-284</span> <br> <span class="presentation_title">KEYNOTE: Systematic data labeling at the point of ingestion in enterprise systems, </span><span class="author_string">Gevorg Karapetyan, Zero Cognitive Systems (United States)</span></p> <p class="session_notes">Gevorg Karapetyan is co-founder and chief technology officer of Zero Cognitive Systems. In this role he leads the long-term technology vision and is responsible for the direction, coordination, and delivery of technology. Founded in 2015 in Los Gatos, California, Zero is dedicated to applying artificial intelligence and smart automation to the most pressing operational challenges of the professional services industry. Karapetyan previously worked at Imagenomic as a senior software engineer and attended the National Polytechnic University of Armenia. He holds a PhD in computer science and has more than 10 years of experience in developing intelligent automation systems.</p> <p class="abstract">Almost 80% of enterprise data is unstructured. Unstructured data includes documents, emails, images, web pages, video files, audio files, etc., stored in different data silos. Classification of unstructured data is an important topic for the world's largest enterprises; one approach is labeling the content per project. We present a system for systematic labeling of unstructured data at the point of ingestion. This approach makes it possible to systematically generate metadata from incoming unstructured data, which can be stored in data catalogs, unlocking business insights from the data and reducing security risks. (A schematic sketch of such an ingestion hook follows below.)</p> <p> </p> </div>
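<p class="session_notes">A schematic of labeling at the point of ingestion, as described in the keynote abstract: content is classified as it arrives and the resulting metadata is written to a catalog. Every function below is a hypothetical stub; the system's actual classifiers and catalog are not public, so only the placement of the labeling step is the point.</p>
<pre><code># Hypothetical sketch: label unstructured content as it is ingested, before
# it lands in a silo, and record the metadata in a data catalog.
from dataclasses import dataclass, field

@dataclass
class Catalog:
    records: list = field(default_factory=list)
    def add(self, entry):
        self.records.append(entry)

def classify(payload: bytes) -> str:
    # stand-in for the real model; labels by a trivial content sniff
    return "image" if payload[:4] == b"\x89PNG" else "document"

def ingest(name: str, payload: bytes, catalog: Catalog) -> str:
    label = classify(payload)              # labeling happens at ingestion
    catalog.add({"name": name, "label": label, "bytes": len(payload)})
    return label

catalog = Catalog()
ingest("scan.png", b"\x89PNG....", catalog)
ingest("memo.txt", b"quarterly notes", catalog)
print(catalog.records)
</code></pre>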
<p class="session_title">Machine Learning for Image Processing (W3.2)</p> <span class="chair_label">Session Chairs: </span> <span class="chair">Karen Egiazarian, Tampere University (Finland) and Atanas Gotchev, Tampere University (Finland)<br> </span> <span class="session_time">4:15 – 5:35 PM</span> <br> <span class="room">Cyril Magnin III<br> </span> <p class="presentation_time" style="text-align:left;">4:15<a name="IPAS-285"></a><span style="float: right;">IPAS-285</span> <br> <span class="presentation_title">ORCA: An end-to-end video object removal framework with cropping interested region and quality assessment, </span><span class="author_string">Minseong Son, Hansol Lee, Sungkeun Kwak, and Jihwan Woo, CJ OliveNetworks (Republic of Korea)</span></p> <p class="abstract">Various video inpainting models have been released recently; video inpainting naturally erases a chosen object from a video. However, using inpainting models usually requires frames extracted from a video together with masks, and these data are mostly prepared manually. We propose ORCA, a novel end-to-end video object removal framework with cropping of the region of interest and video quality assessment. ORCA is built end to end by combining detection, segmentation, and inpainting modules; a distinguishing characteristic of the framework is a cropping step applied before the inpainting step. In addition, since ORCA uses two models for inpainting, we propose our own video quality assessment metric, which identifies the higher-quality result of the two models. Experimental results show the superior performance of the proposed methods. (A skeleton of such a pipeline follows below.)</p> <p> </p>
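<p class="session_notes">A skeleton of the end-to-end flow the ORCA abstract describes: detect the target, crop the region of interest, segment, inpaint with two models, and let the quality metric keep the better result. All functions are hypothetical stubs standing in for the paper's modules; only the control flow is illustrated.</p>
<pre><code># Hypothetical pipeline skeleton: detection, ROI cropping, segmentation,
# two inpainting models, and quality assessment to pick the better output.
def detect(frame):           return (10, 10, 50, 50)    # bbox of the target
def crop(frame, bbox):       return frame               # stub ROI crop
def segment(patch):          return "mask"              # stub object mask
def inpaint_a(patch, mask):  return "result_a"          # inpainting model 1
def inpaint_b(patch, mask):  return "result_b"          # inpainting model 2
def quality(result):         return {"result_a": 0.7, "result_b": 0.9}[result]

def remove_object(frame):
    bbox = detect(frame)                 # locate the object to erase
    patch = crop(frame, bbox)            # work on the interested region only
    mask = segment(patch)                # pixel-accurate mask for inpainting
    candidates = [inpaint_a(patch, mask), inpaint_b(patch, mask)]
    return max(candidates, key=quality)  # QA metric selects the better model

print(remove_object("frame0"))
</code></pre>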
<p class="presentation_time" style="text-align:left;">4:35<a name="IPAS-286"></a><span style="float: right;">IPAS-286</span> <br> <span class="presentation_title">Detection of object throwing behavior in surveillance videos, </span><span class="author_string">Ivo P.C. Kersten, Erkut Akdag, Egor Bondarev, and Peter H. de With, Eindhoven University of Technology (the Netherlands)</span></p> <p class="abstract">Anomalous behavior detection is a challenging research area within computer vision. One such behavior is the throwing action in traffic flow, a unique requirement of our Smart City project to enhance public safety. This paper proposes a deep learning solution for throwing-action detection in surveillance videos. Because datasets for throwing actions are not publicly available, we first generate the novel public Throwing Action dataset, consisting of 271 videos of throwing actions performed by traffic participants such as pedestrians, bicyclists, and car drivers, and 130 normal videos without throwing actions. Second, we compare the performance of different feature extractors for our anomaly detection method on the UCF-Crime and Throwing-Action datasets. Finally, we improve the anomaly detection performance by applying the Adam optimizer instead of Adadelta, and we propose a mean normal loss function that yields better anomaly detection performance (a hedged sketch of such a loss follows below). The experimental results reach an area under the ROC curve of 86.10 on the Throwing-Action dataset and 80.13 on the combined UCF-Crime+Throwing dataset.</p> <p> </p>
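<p class="session_notes">The abstract does not spell out its loss, so the following is only one plausible reading: on top of a standard multiple-instance ranking objective for weakly supervised anomaly detection (an anomalous video should contain at least one segment scoring above every normal segment), a "mean normal loss" term could penalize the average score of normal segments. Treat both formulas as assumptions.</p>
<pre><code>import numpy as np

def mil_ranking_loss(scores_anom, scores_norm, margin=1.0):
    """Hinge ranking loss between top anomalous and top normal segments."""
    return max(0.0, margin - scores_anom.max() + scores_norm.max())

def mean_normal_loss(scores_norm):
    """Assumed reading: push the average normal-segment score toward zero."""
    return float(np.mean(scores_norm))

rng = np.random.default_rng(3)
s_anom = rng.uniform(0, 1, 32)    # per-segment scores, anomalous video
s_norm = rng.uniform(0, 0.5, 32)  # per-segment scores, normal video
total = mil_ranking_loss(s_anom, s_norm) + 0.1 * mean_normal_loss(s_norm)
print(f"total loss: {total:.3f}")
</code></pre>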
<p class="presentation_time" style="text-align:left;">4:55<a name="IPAS-287"></a><span style="float: right;">IPAS-287</span> <br> <span class="presentation_title">Hybrid diffractive optics (DOE & refractive lens) for broadband EDoF imaging, </span><span class="author_string">SeyyedReza MiriRostami, Samuel Pinilla, Igor Shevkunov, Vladimir Katkovnik, and Karen Egiazarian, Tampere University (Finland)</span></p> <p class="abstract">In the considered hybrid diffractive imaging system, a refractive lens is combined with a multilevel phase mask (MPM) acting as a diffractive optical element (DOE) for achromatic extended-depth-of-field (EDoF) imaging. This paper proposes a fully differentiable image formation model that uses neural network techniques to maximize imaging quality by optimizing the MPM, the digital image reconstruction algorithm, the refractive lens parameters (aperture size, focal length), and the distance between the MPM and the sensor. First, model-based numerical simulations and end-to-end joint optimization of imaging are used. In the second design stage, a spatial light modulator (SLM) implements the MPM optimized in the first stage, and the image processing is optimized experimentally using a learning-based approach. The third stage jointly optimizes the SLM phase pattern and the image reconstruction algorithm in a hardware-in-the-loop (HIL) setup, which compensates for the mismatch between numerical modeling and the physical reality of the optics and sensor. A comparative analysis of imaging accuracy and quality over the optical parameters is presented. It is proved experimentally, for the first time to the best of our knowledge, that wavefront phase modulation can provide advanced imaging quality compared with some commercial multi-lens cameras. (A toy version of the underlying image formation model follows below.)</p> <p> </p>
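<p class="session_notes">A toy version of the differentiable image formation at the heart of such end-to-end optical designs: the point spread function (PSF) is the squared magnitude of the Fourier transform of the pupil function (aperture times the phase mask's complex exponential), and the sensor image is the scene convolved with that PSF. The grid size and the quadratic phase standing in for the MPM are illustrative assumptions.</p>
<pre><code>import numpy as np

# Toy scalar-diffraction model: pupil = aperture * exp(i * phase),
# PSF = |FFT(pupil)|^2, image = scene convolved with the PSF.
N = 64
y, x = np.mgrid[-N // 2:N // 2, -N // 2:N // 2]
aperture = np.less_equal(x**2 + y**2, (N // 4) ** 2).astype(float)
phase = 0.002 * (x**2 + y**2)            # assumed quadratic phase mask
pupil = aperture * np.exp(1j * phase)

psf = np.abs(np.fft.fftshift(np.fft.fft2(np.fft.ifftshift(pupil)))) ** 2
psf /= psf.sum()

scene = np.zeros((N, N))
scene[N // 2, N // 2] = 1.0              # point source test scene
image = np.real(np.fft.ifft2(np.fft.fft2(scene) *
                             np.fft.fft2(np.fft.ifftshift(psf))))
print("PSF peak:", float(psf.max()), "image peak:", float(image.max()))
</code></pre>
<p class="session_notes">In an end-to-end design, the phase values and lens parameters become learnable variables in an autodiff framework and are optimized jointly with the reconstruction network.</p>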
<p class="presentation_time" style="text-align:left;">5:15<a name="IPAS-288"></a><span style="float: right;">IPAS-288</span> <br> <span class="presentation_title">Evaluating active learning for blind imbalanced domains, </span><span class="author_string">Hiroshi Kuwajima<sup>1</sup>, Masayuki Tanaka<sup>2</sup>, and Masatoshi Okutomi<sup>2</sup>; <sup>1</sup>DENSO Corporation and <sup>2</sup>Tokyo Institute of Technology (Japan)</span></p> <p class="abstract">Deep learning, which has been very successful in recent years, requires large amounts of data. Active learning has been studied and used for decades to reduce annotation costs and now attracts much attention in deep learning: many real-world applications use it to select the informative data to be annotated. In this paper, we first investigate laboratory settings for active learning. We show significant gaps between the results obtained under different laboratory settings and describe a practical laboratory setting that reasonably reflects active learning use cases in real-world applications. We then introduce the problem setting of blind imbalanced domains. Any dataset includes multiple domains, e.g., individuals with different social attributes in handwritten character recognition. Major domains have many samples and minor domains have few samples in the training set, yet both major and minor domains must be inferred accurately in the test phase. We experimentally compare different active learning methods for blind imbalanced domains in our practical laboratory setting. We show that a simple active learning method using the softmax margin and a model training method using distance-based sampling with center loss, both working in the deep feature space, perform well. (A sketch of softmax-margin selection follows below.)</p> <p> </p>
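<p class="session_notes">Softmax-margin selection, the simple acquisition rule the abstract highlights: query the unlabeled samples whose top two class probabilities are closest, since those are the ones the model is least sure how to separate. The random logits below are placeholders.</p>
<pre><code>import numpy as np

def softmax(z):
    z = z - z.max(axis=1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=1, keepdims=True)

def margin_scores(logits):
    """Top-1 minus top-2 class probability per sample."""
    p = np.sort(softmax(logits), axis=1)
    return p[:, -1] - p[:, -2]    # a small margin marks an ambiguous sample

rng = np.random.default_rng(4)
logits = rng.normal(size=(100, 10))            # 100 unlabeled samples
query = np.argsort(margin_scores(logits))[:8]  # 8 most ambiguous to annotate
print("indices to annotate:", query)
</code></pre>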
<p class="event_time">5:30 – 7:00 PM EI 2023 Symposium Interactive (Poster) Paper Session (in the Cyril Magnin Foyer)</p> <p class="event_time">5:30 – 7:00 PM EI 2023 Meet the Future: A Showcase of Student and Young Professionals Research (in the Cyril Magnin Foyer)</p> <p class="session_title">Image Processing: Algorithms and Systems XXI Interactive (Poster) Paper Session (W4)</p> <span class="session_time">5:35 – 7:00 PM</span> <br> <span class="room">Cyril Magnin Foyer</span> <br> <p class="session_notes">The following work will be presented at the EI 2023 Symposium Interactive (Poster) Paper Session.</p> <p class="presentation_time" style="text-align:left;"> <a name="IPAS-290"></a><span style="float: right;">IPAS-290</span> <br> <span class="presentation_title">MLExchange: An integrated platform for scientific machine learning, </span><span class="author_string">Guanhua Hao<sup>1</sup>, Tanny Chavez<sup>1</sup>, Zhuowen Zhao<sup>1</sup>, Elizabeth Holman<sup>1</sup>, Eric Roberts<sup>1</sup>, Howard Yanxon<sup>2</sup>, Adam Green<sup>1</sup>, Harinarayan Krishnan<sup>1</sup>, Dylan McReynolds<sup>1</sup>, Nicholas Schwarz<sup>2</sup>, Petrus Zwart<sup>1</sup>, Alexander Hexemer<sup>1</sup>, and Dilworth Parkinson<sup>1</sup>; <sup>1</sup>Lawrence Berkeley National Laboratory and <sup>2</sup>Argonne National Laboratory (United States)</span></p> <p class="abstract">Scientific user facilities are some of the world's leading producers of scientific data. MLExchange, a collaborative project across several Department of Energy (DOE) national laboratories, is building a shared machine learning platform intended as a toolbox that improves the experience of users working with large scientific datasets. Two applications within the platform focus on image analysis: an image segmentation application and an image labeling pipeline. The segmentation application has a web-based interface with embedded machine learning algorithms to aid segmentation tasks; three models, including a Mixed-Scale Dense convolutional neural network (MSDNet), have been successfully deployed. The labeling pipeline consists of three web-based applications (Label Maker, Data Clinic, and MLCoach) and aims to provide automatic sample-type identification and classification. Several use cases have been deployed using X-ray scattering and microCT data.</p> <p> </p>
<p class="date">Thursday 19 January 2023</p> <p class="session_title">Face and Facial Image Processing (R1)</p> <span class="chair_label">Session Chairs: </span> <span class="chair">Karen Egiazarian, Tampere University (Finland) and Atanas Gotchev, Tampere University (Finland)<br> </span> <span class="session_time">8:50 – 9:50 AM</span> <br> <span class="room">Cyril Magnin III<br> </span> <p class="presentation_time" style="text-align:left;">8:50<a name="IPAS-291"></a><span style="float: right;">IPAS-291</span> <br> <span class="presentation_title">Facial expression recognition using visual transformer with histogram of oriented gradients, </span><span class="author_string">Jieun Kim, Ju o Kim, Seungwan Je, and Deokwoo Lee, Keimyung University (Republic of Korea)</span></p> <p class="abstract">Emotions play an important role in our lives as responses to our interactions with others, to our decisions, and so on. Among the various emotional signals, facial expression is one of the most powerful and natural means for humans to convey emotions and intentions, and it has the advantage that the information is easily obtained using only a camera, so facial-expression-based emotion research is very active. Facial expression recognition (FER) has been studied by classifying expressions into seven basic emotions: anger, disgust, fear, happiness, sadness, surprise, and neutral. Before deep learning, handcrafted feature extractors and simple classifiers such as SVM and AdaBoost were used to extract facial emotion; with the advent of deep learning, facial expressions can now be recognized without handcrafted feature extractors. Despite excellent performance in FER research, the task remains challenging due to external factors such as occlusion, illumination, and pose, and due to similarity between different facial expressions. In this paper, we propose a method that trains a ResNet [1] with a visual transformer [2], called FViT, and uses Histogram of Oriented Gradients (HOG) [3] features to address the similarity problem between facial expressions. (A minimal HOG extraction example follows below.)</p> <p> </p>
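<p class="session_notes">HOG features of the kind the paper combines with the transformer can be computed with scikit-image. This standalone example only shows the descriptor; the paper's exact HOG configuration is not stated, so the parameters below are the common Dalal-Triggs defaults.</p>
<pre><code>from skimage import data
from skimage.feature import hog

# Standard HOG: 9 orientation bins, 8x8-pixel cells, 2x2-cell blocks.
# The input here is a stock grayscale image standing in for a face crop.
image = data.camera()
features = hog(image, orientations=9, pixels_per_cell=(8, 8),
               cells_per_block=(2, 2), block_norm="L2-Hys")
print("HOG feature length:", features.shape[0])
</code></pre>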
<p class="presentation_time" style="text-align:left;">9:10<a name="IPAS-292"></a><span style="float: right;">IPAS-292</span> <br> <span class="presentation_title">Face expressions understanding by geometrical characterization of deep human faces representation, </span><span class="author_string">Adrien Raison, Theo Biardeau, Pascal Bourdon, and David Helbert, Université de Poitiers (France)</span></p> <p class="abstract">Understanding facial expressions is a key to a better understanding of human nature. In this contribution we propose an end-to-end pipeline that takes color images as input and produces a semantic graph that numerically encodes facial emotions. The approach leverages low-level geometric details of the face representation, i.e., numerical representations of facial muscle activation patterns, to build this emotional understanding. We show that our method recovers social expectations of what characterizes facial emotions.</p> <p> </p> <p class="presentation_time" style="text-align:left;">9:30<a name="IPAS-293"></a><span style="float: right;">IPAS-293</span> <br> <span class="presentation_title">Crowd counting using deep learning based head detection, </span><span class="author_string">Maryam Hassan<sup>1</sup>, Farhan Hussain<sup>1</sup>, Sultan D. Khan<sup>2</sup>, Mohib Ullah<sup>3</sup>, Mudassar Yamin<sup>3</sup>, and Habib Ullah<sup>4</sup>; <sup>1</sup>NUST College of Electrical & Mechanical Engineering (Pakistan), <sup>2</sup>National University of Technology (Pakistan), <sup>3</sup>Norwegian University of Science and Technology (Norway), and <sup>4</sup>Norwegian University of Life Sciences (NMBU) (Norway)</span></p> <p class="abstract">Scale invariance and high miss-detection rates for small objects are challenging issues in object detection and often lead to inaccurate results. This research aims to provide an accurate detection model for crowd counting by focusing on human head detection in natural scenes from the publicly available Casablanca, Hollywood-Heads, and SCUT-HEAD datasets. We fine-tune YOLOv5, a deep convolutional neural network (CNN) based object detection architecture, using a transfer learning approach, and evaluate the model using the mean average precision (mAP) score, precision, and recall. Training on one dataset and testing on another leads to inaccurate results because the head types differ across datasets; another main contribution of our research is therefore combining the three datasets into a single dataset that includes every kind of head: medium, large, and small. The experimental results show that this YOLOv5 architecture yields significant improvements in small-head detection in crowded scenes compared to baseline approaches such as Faster R-CNN and the VGG-16-based SSD MultiBox detector. (A minimal counting-by-detection sketch follows below.)</p> <p> </p>
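<p class="session_notes">Counting by detection reduces to running the detector and counting confident boxes. The sketch loads a generic pretrained YOLOv5 from torch.hub purely for illustration and counts the COCO "person" class as a stand-in; the paper fine-tunes its own head-detection weights, which are not assumed to be available here.</p>
<pre><code>import torch

# Generic pretrained YOLOv5 (illustration only; the paper trains its own
# head detector, and COCO class 0, "person", is used as a stand-in).
model = torch.hub.load("ultralytics/yolov5", "yolov5s", pretrained=True)

results = model("https://ultralytics.com/images/zidane.jpg")
det = results.xyxy[0]                      # columns: x1, y1, x2, y2, conf, cls
confident = det[det[:, 4] > 0.5]           # keep confident detections only
count = int((confident[:, 5] == 0).sum())  # count "person" boxes
print("estimated count:", count)
</code></pre>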
<p class="event_time">10:20 – 10:50 AM Coffee Break</p> <div class="callout"> <p class="session_title">KEYNOTE: Vulnerability of Neural Networks (R2.1)</p> <span class="chair">Session Chairs: Karen Egiazarian, Tampere University (Finland) and Atanas Gotchev, Tampere University (Finland)<br> </span><span class="session_time">10:50 – 11:30 AM</span> <br> <span class="room">Cyril Magnin III</span> <br> <p class="presentation_time" style="text-align:left;"> <a name="IPAS-294"></a><span style="float: right;">IPAS-294</span> <br> <span class="presentation_title">KEYNOTE: Surprising vulnerability of neural networks: Recovering training and input data in federated learning and split computing, </span><span class="author_string">Pavlo Molchanov, NVIDIA Corporation (United States)</span></p> <p class="session_notes">Pavlo Molchanov obtained his PhD (2014) from Tampere University of Technology, Finland, in the area of signal processing. His dissertation focused on designing automatic target recognition systems for radars. Since 2015 he has been with the Learning and Perception Research team at NVIDIA, where he currently holds a senior research scientist position. His research focuses on methods for neural network acceleration and on designing novel human-computer interaction systems and human understanding. On network acceleration, he is interested in neural network pruning methods and conditional inference; for human understanding, he works on landmark estimation, gesture recognition, and hand pose estimation.</p> <p class="abstract">We present a number of studies demonstrating that the training data distribution can be recovered given only the final trained model. We also study data recovery in the split computing scenario, where only intermediate features are shared. Finally, we present results of a gradient attack in federated learning that, for the first time, demonstrates almost exact image recovery. The focus is on large convolutional networks such as ResNets and transformers, and on complex datasets such as ImageNet. (A toy gradient-inversion sketch follows the session description.)</p> <p> </p> </div>
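<p class="session_notes">A toy version of the gradient attack idea, in the spirit of the "deep leakage from gradients" line of work that the keynote's results extend: the attacker observes the gradient a client computed on private data and optimizes a dummy input until its gradient matches. A tiny linear model, random data, and a known label keep the sketch short; nothing here reproduces the talk's actual method.</p>
<pre><code>import torch

torch.manual_seed(0)
model = torch.nn.Linear(8, 3)                 # toy shared model
secret_x = torch.randn(1, 8)                  # client's private input
secret_y = torch.tensor([1])                  # label assumed known here

loss = torch.nn.functional.cross_entropy(model(secret_x), secret_y)
true_grads = torch.autograd.grad(loss, model.parameters())  # what is shared

dummy_x = torch.randn(1, 8, requires_grad=True)             # attacker's guess
opt = torch.optim.Adam([dummy_x], lr=0.1)
for _ in range(300):
    opt.zero_grad()
    d_loss = torch.nn.functional.cross_entropy(model(dummy_x), secret_y)
    d_grads = torch.autograd.grad(d_loss, model.parameters(),
                                  create_graph=True)
    match = sum(((dg - tg) ** 2).sum() for dg, tg in zip(d_grads, true_grads))
    match.backward()
    opt.step()

print("input recovery error:", float((dummy_x - secret_x).norm()))
</code></pre>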
<p class="session_title">Segmentation, Classification, and Tracking (R2.2)</p> <span class="chair_label">Session Chairs: </span> <span class="chair">Karen Egiazarian, Tampere University (Finland) and Atanas Gotchev, Tampere University (Finland)<br> </span> <span class="session_time">11:30 AM – 12:30 PM</span> <br> <span class="room">Cyril Magnin III<br> </span> <p class="presentation_time" style="text-align:left;">11:30<a name="IPAS-295"></a><span style="float: right;">IPAS-295</span> <br> <span class="presentation_title">Exploring effects of colour and image quality in semantic segmentation (JIST-first), </span><span class="author_string">Kanjar De, Luleå University of Technology (Sweden)</span></p> <p class="abstract">Recent advances in convolutional neural networks and vision transformers have brought a revolution to computer vision. Studies have shown that the performance of deep learning based models is sensitive to image quality. The human visual system can still infer semantic information from poor-quality images, but deep learning algorithms may find this task challenging. In this paper, we study the effect of image quality and colour parameters on deep learning models trained for semantic segmentation. A major challenge in benchmarking robust deep learning based computer vision models is the lack of challenging data covering different quality and colour parameters; we therefore generate data from a subset of the standard benchmark semantic segmentation dataset (ADE20K) with the goal of studying the effect of different quality and colour parameters on the semantic segmentation task. To the best of our knowledge, this is one of the first attempts to benchmark semantic segmentation algorithms under different colour and quality parameters, and this study should motivate further research in this direction.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:50<a name="IPAS-296"></a><span style="float: right;">IPAS-296</span> <br> <span class="presentation_title">ILIAC: Efficient classification of degraded images using knowledge distillation with cutout data augmentation, </span><span class="author_string">Dinesh Daultani<sup>1</sup>, Masayuki Tanaka<sup>1</sup>, Masatoshi Okutomi<sup>1</sup>, and Kazuki Endo<sup>2</sup>; <sup>1</sup>Tokyo Institute of Technology and <sup>2</sup>Teikyo Heisei University (Japan)</span></p> <p class="abstract">Image classification is used extensively in applications such as satellite imagery, autonomous driving, smartphones, and healthcare. Most images used to train classification models can be considered ideal, i.e., free of degradation from corrupted camera-sensor pixels, sudden shake blur, or image compression. In this paper, we propose ILIAC, a novel CNN-based architecture for classifying degraded images based on intermediate-layer knowledge distillation and the cutout data augmentation approach. Our approach achieves 1.1% and 0.4% mean accuracy improvements over the current state-of-the-art approach across all degradation levels of JPEG and AWGN, respectively. Furthermore, the ILIAC method is computationally efficient, at about half the size of the previous state-of-the-art approach in terms of model parameters and GFLOPs count. Additionally, we demonstrate that a larger teacher network is not necessarily needed in knowledge distillation to improve the performance and generalization of a smaller student network for the classification of degraded images. (A minimal distillation-loss sketch follows below.)</p> <p> </p>
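<p class="session_notes">A minimal sketch of the knowledge distillation objective underlying approaches like ILIAC: the student matches the teacher's temperature-softened outputs in addition to the hard labels. This is the generic Hinton-style formulation; the paper's intermediate-layer variant also distills feature maps, which is not shown.</p>
<pre><code>import numpy as np

def softmax(z, t=1.0):
    z = z / t
    z = z - z.max()
    e = np.exp(z)
    return e / e.sum()

def distillation_loss(student_logits, teacher_logits, label, t=4.0, alpha=0.5):
    """Hard-label cross-entropy blended with soft-target KL divergence."""
    p_s, p_t = softmax(student_logits, t), softmax(teacher_logits, t)
    kl = float(np.sum(p_t * (np.log(p_t) - np.log(p_s))))   # soft targets
    ce = float(-np.log(softmax(student_logits)[label]))     # hard label
    return alpha * ce + (1 - alpha) * (t ** 2) * kl         # t^2 rescales KL

student = np.array([1.0, 0.5, -0.2])
teacher = np.array([2.0, 0.1, -1.0])
print(f"KD loss: {distillation_loss(student, teacher, label=0):.3f}")
</code></pre>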
<p class="presentation_time" style="text-align:left;">12:10<a name="IPAS-297"></a><span style="float: right;">IPAS-297</span> <br> <span class="presentation_title">AInBody: Are you in shape? An integrated deep learning model that tracks your body measurements, </span><span class="author_string">Nakyung Lee, Youngsun Cho, Minseong Son, Sungkeun Kwak, and Jihwan Woo, CJ OliveNetworks (Republic of Korea)</span></p> <p class="abstract">This paper presents AInBody, a novel deep learning based body-shape measurement solution. We have devised a user-centered design that automatically tracks the body's progress by integrating several methods, including human parsing, instance segmentation, and image matting. Our system guides the user's pose when taking photos by displaying the outline of the user's latest picture, divides the human body into several parts, and compares before-and-after photos at the body-part level. Parsing performance has been improved through an ensemble approach and a denoising phase in our main module, the Advanced Human Parser. In evaluation, the proposed method is 0.1% to 4.8% better in average precision than the next best-performing model on 3 out of 5 parts, and 1.4% and 2.4% superior in mAP and mean IoU, respectively. Furthermore, our framework takes approximately three seconds to process one HD image, demonstrating that the structure can be applied to real-time applications.</p> <p> </p>
<p class="event_time">12:30 – 2:00 PM Lunch</p> <p class="session_title">Biomedical Image Processing (R3)</p> <span class="chair_label">Session Chairs: </span> <span class="chair">Karen Egiazarian, Tampere University (Finland) and Atanas Gotchev, Tampere University (Finland)<br> </span> <span class="session_time">2:00 – 3:00 PM</span> <br> <span class="room">Cyril Magnin III<br> </span> <p class="presentation_time" style="text-align:left;">2:00<a name="IPAS-298"></a><span style="float: right;">IPAS-298</span> <br> <span class="presentation_title">Deep learning based speech emotion recognition for Parkinson patients, </span><span class="author_string">Habib Khan<sup>1</sup>, Mohib Ullah<sup>2</sup>, Fadi Al-Machot<sup>3</sup>, Faouzi Alaya Cheikh<sup>2</sup>, and Muhammad Sajjad<sup>2</sup>; <sup>1</sup>Islamia College University Peshawar (Pakistan), <sup>2</sup>Norwegian University of Science and Technology (Norway), and <sup>3</sup>Norwegian University of Life Sciences (Norway)</span></p> <p class="abstract">Speech emotions (SEs) are an important component of human interaction and an efficient way of influencing human behavior. Recognizing emotions from speech is an emerging but challenging area of digital signal processing (DSP), and healthcare professionals are looking for the best ways to understand patient voices for better diagnosis and treatment. Speech emotion recognition (SER) from the human voice, particularly in a person with a neurological disorder such as Parkinson's disease (PD), can expedite the diagnostic process; PD patients are mostly diagnosed through expensive tests and continuous monitoring, which is time-consuming and very costly. The primary goal of this research is to develop a system that can accurately identify common SEs such as anger, happiness, neutral, and sadness. We propose a novel lightweight deep model to predict common SEs. The adaptive wavelet thresholding method is employed for pre-processing the audio data, and the model is trained on spectrograms generated from the Interactive Emotional Dyadic Motion Capture (IEMOCAP) dataset. The proposed network contains convolutional layers that learn discriminative features from the spectrograms, and a dense layer with a softmax classifier performs the classification. The accuracy of the proposed framework is evaluated on standard performance metrics, which show promising real-time results for PD patients. (A minimal spectrogram-extraction example follows below.)</p> <p> </p>
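<p class="session_notes">Producing the kind of spectrogram input such a model consumes takes a few lines with scipy; the synthetic chirp below stands in for an IEMOCAP utterance, and the window parameters are ordinary defaults rather than the paper's configuration.</p>
<pre><code>import numpy as np
from scipy import signal

# Synthetic one-second "utterance": a chirp plus noise at 16 kHz.
fs = 16000
t = np.arange(fs) / fs
audio = signal.chirp(t, f0=200, f1=3000, t1=1.0) + 0.05 * np.random.randn(fs)

# Log-magnitude spectrogram, the usual 2D input for a CNN classifier.
freqs, times, sxx = signal.spectrogram(audio, fs=fs, nperseg=512, noverlap=384)
log_spec = 10 * np.log10(sxx + 1e-10)
print("spectrogram shape (freq bins x frames):", log_spec.shape)
</code></pre>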
<p class="presentation_time" style="text-align:left;">2:20<a name="IPAS-299"></a><span style="float: right;">IPAS-299</span> <br> <span class="presentation_title">Blind denoising of dental X-ray images, </span><span class="author_string">Mykola Ponomarenko<sup>1</sup>, Oleksandr Miroshnichenko<sup>2</sup>, Vladimir Lukin<sup>2</sup>, Sergey Krivenko<sup>2</sup>, and Karen Egiazarian<sup>1</sup>; <sup>1</sup>Tampere University (Finland) and <sup>2</sup>National Aerospace University (Ukraine)</span></p> <p class="abstract">The paper considers automatic analysis and noise suppression in dental X-ray images, e.g., images acquired by a dental Morita system. Such images contain spatially correlated noise with an unknown spectrum and a standard deviation that varies across image regions. We propose two deep convolutional neural networks: the first estimates the spectrum and level of noise for each pixel of a noisy image, predicting maps of the noise standard deviation at three image scales; the second uses these maps as inputs to suppress the noise in the image. Using simulated and real-life images, it is shown that the proposed networks provide a PSNR for dental X-ray images that is 2.7 dB better than that of other modern denoising methods. (A crude noise-map baseline is sketched below.)</p> <p> </p>
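<p class="session_notes">As a crude stand-in for the first network's job, a local noise standard deviation map can be estimated from a high-pass residual; the learned model replaces this with per-pixel, per-scale predictions that also capture the noise spectrum. The window size and estimator here are generic choices, not the paper's.</p>
<pre><code>import numpy as np
from scipy import ndimage

def local_sigma_map(image, size=16):
    """Rough per-pixel noise-std map from a high-pass residual."""
    residual = image - ndimage.uniform_filter(image, size=3)  # high-pass
    local_var = ndimage.uniform_filter(residual ** 2, size=size)
    return np.sqrt(local_var)

rng = np.random.default_rng(5)
clean = np.tile(np.linspace(0, 1, 128), (128, 1))
sigma_true = 0.05 + 0.1 * np.linspace(0, 1, 128)[None, :]  # varies spatially
noisy = clean + rng.normal(size=clean.shape) * sigma_true
est = local_sigma_map(noisy)
print("estimated sigma range:", float(est.min()), float(est.max()))
</code></pre>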
<p class="presentation_time" style="text-align:left;">2:40<a name="IPAS-300"></a><span style="float: right;">IPAS-300</span> <br> <span class="presentation_title">Automatic estimation of mucosal waves lateral peak sharpness – Modern approach, </span><span class="author_string">Ales Zita<sup>1</sup>, Simon Gresko<sup>1</sup>, Adam Novozamsky<sup>1</sup>, Michal Sorel<sup>1</sup>, Barbara Zitova<sup>1</sup>, Jan Svec<sup>2</sup>, and Jitka Vydrova<sup>3</sup>; <sup>1</sup>Institute of Information Theory and Automation, <sup>2</sup>Palacky University, and <sup>3</sup>Voice Centre Prague, Medical Healthcom, Ltd (Czechia)</span></p> <p class="abstract">Videokymographic (VKG) images of the human larynx are often used for automatic extraction of vibratory features for diagnostic purposes. One of the most challenging parameters to evaluate is the presence of the mucosal wave and the sharpness of its lateral peaks. Although these features can be clinically helpful and give insight into the health and pliability of the vocal fold mucosa, identifying and visually estimating the sharpness is challenging for human examiners and even more so for an automatic process. This work aims to create and validate a method that can automatically quantify lateral peak sharpness from VKG images using a convolutional neural network.</p> </div>
{"_autoPostBack":true,"_postBackOnClick":true,"_postBackReference":"__doPostBack(\u0027ctl01$TemplateBody$WebPartManager1$gwpciConfCCO$ciConfCCO$radTab_Top\u0027,\u0027arguments\u0027)","_scrollButtonsPosition":1,"_selectedIndex":1,"_skin":"MetroTouch","causesValidation":false,"clientStateFieldID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top_ClientState","enableAriaSupport":true,"multiPageID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage","selectedIndexes":["1"],"tabData":[{"value":"1","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_1","attributes":{"translate":"yes"}},{"value":"2","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_2","attributes":{"translate":"yes"}},{"value":"3","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_3","attributes":{"translate":"yes"}},{"value":"4","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_4","attributes":{"translate":"yes"}}]}, null, null, $get("ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadMultiPage, {"clientStateFieldID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage_ClientState","pageViewData":[{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_1"},{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_2"},{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_3"},{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_4"}],"selectedIndex":1}, null, null, $get("ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadAjaxManager, {"_updatePanels":"","ajaxSettings":[],"clientEvents":{OnRequestStart:"",OnResponseEnd:""},"defaultLoadingPanelID":"AjaxStatusLoadingPanel","enableAJAX":true,"enableHistory":false,"links":[],"styles":[],"uniqueID":"ctl01$RadAjaxManager1","updatePanelsRenderMode":0}, null, null, $get("ctl01_RadAjaxManager1")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindow, {"_dockMode":false,"behaviors":117,"clientStateFieldID":"ctl01_GenericWindow_ClientState","enableAriaSupport":true,"formID":"aspnetForm","height":"550px","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","modal":true,"name":"GenericWindow","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","showContentDuringLoad":false,"skin":"MetroTouch","visibleStatusbar":false,"width":"800px"}, null, null, $get("ctl01_GenericWindow")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindow, 
{"_dockMode":false,"behaviors":117,"clientStateFieldID":"ctl01_ObjectBrowser_ClientState","enableAriaSupport":true,"formID":"aspnetForm","height":"550px","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","modal":true,"name":"ObjectBrowser","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","showContentDuringLoad":false,"skin":"MetroTouch","visibleStatusbar":false,"width":"760px"}, null, null, $get("ctl01_ObjectBrowser")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindow, {"_dockMode":false,"behaviors":117,"clientStateFieldID":"ctl01_ObjectBrowserDialog_ClientState","enableAriaSupport":true,"formID":"aspnetForm","height":"400px","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","modal":true,"name":"ObjectBrowserDialog","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","showContentDuringLoad":false,"skin":"MetroTouch","visibleStatusbar":false,"width":"600px"}, null, null, $get("ctl01_ObjectBrowserDialog")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindowManager, {"behaviors":117,"clientStateFieldID":"ctl01_WindowManager1_ClientState","enableAriaSupport":true,"formID":"aspnetForm","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","name":"WindowManager1","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","skin":"MetroTouch","windowControls":"['ctl01_GenericWindow','ctl01_ObjectBrowser','ctl01_ObjectBrowserDialog']"}, null, {"child":"ctl01_GenericWindow"}, $get("ctl01_WindowManager1")); }); //]]> </script> </form> <div id="fb-root"></div> </body> </html>