Human Vision and Electronic Imaging 2023 (HVEI)

The HVEI conference explores the role of human perception and cognition in the design, analysis, and use of electronic media systems.
margin: 0; padding: 0; } #menuv_NAV ul { position: relative; z-index: 597; float: left; } #menuv_NAV ul li { float: left; min-height: 1px; line-height: 1.5em; vertical-align: middle; } #menuv_NAV ul li.hover, #menuv_NAV ul li:hover { position: relative; z-index: 599; cursor: default; } #menuv_NAV ul ul { visibility: hidden; position: absolute; top: 100%; left: 0; z-index: 598; width: 100%; } #menuv_NAV ul ul li { float: none; } #menuv_NAV ul ul, #menuv_NAV ul ul ul { top: -2px; left: 99%; } #menuv_NAV ul li:hover > ul { visibility: visible; } #menuv_NAV ul li { float: none; } #menuv_NAV a { display: block; font-weight: 400 !important; } /* Custom CSS Styles */ #menuv_NAV { font-family: 'Jost', sans-serif; text-transform: uppercase; font-size: 13px; } #menuv_NAV:after, #menuv_NAV ul:after { content: ''; display: block; clear: both; } #menuv_NAV ul { background: #EEEEEE; border: 0px solid #aaaaaa; padding: 4px; width: 100%; } #menuv_NAV ul li { color: #0C0C0C; position: relative; } #menuv_NAV ul li.hover, #menuv_NAV ul li:hover { background: #cccccc; background: -moz-linear-gradient(#cccccc 0%, #cccccc100%); background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #9f9f9f), color-stop(100%, #cccccc )); background: -webkit-linear-gradient(#cccccc 0%, #cccccc 100%); background: linear-gradient(#cccccc 0%, #cccccc 100%); color: #FFF; } #menuv_NAV ul li.hover > a, #menuv_NAV ul li:hover > a { color: #000; border: 0px solid #cccccc; } #menuv_NAV ul ul { width: 650px; } #menuv_NAV a { border: 0px solid transparent; padding: 3px 10px; } #menuv_NAV a:link, #menuv_NAV a:visited { color: #0C0C0C; text-decoration: none; } #menuv_NAV a:hover { background: #cccccc; background: -moz-linear-gradient(#cccccc 0%, #cccccc 100%); background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #cccccc ), color-stop(100%, #cccccc )); background: -webkit-linear-gradient(#cccccc 0%, #cccccc 100%); background: linear-gradient(#cccccc 0%, #cccccc 100%); color: #FFF; } #menuv_NAV a:active { color: #ffa500; } #menuv_NAV .has-sub:hover > a:after, #menuv_NAV .has-sub.hover > a:after { border-color: transparent transparent transparent #FFF; } #menuv_NAV .has-sub > a:after { content: ''; width: 0px; height: 0px; border-style: solid; border-width: 0px 0px 0px 0px; border-color: transparent transparent transparent #808080; position: absolute; top: 50%; right: 5%; margin-top: -4px; -webkit-transform: rotate(360deg); } </style> <div id="menuv-container"> <div id="menuv_NAV"> <ul> <li> </li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/Attend___Register/IST/Conferences/EI/EI2023/Attend.aspx" target="_blank"><span style="color: #d2232a;">REGISTER</span></a></li> </ul> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=1#EntryCCO'">EI Home/About</a> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=1#EntryCCO">Home</a></li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=2#EntryCCO'">At-a-Glance</a></li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=3#EntryCCO">Awards</a> </li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=4#EntryCCO">EI History</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/TAB_Code_of_Conduct.aspx" target="_blank">Code of 
Conduct</a></li> <li><a href="http://www.imaging.org/IST/IST/About/Press_Releases.aspx" target="_blank">Press Releases</a> </li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx">Symposium Program</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx">EI Program</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=2#ProgramCCO">Symposium Plenary Speakers</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=3#ProgramCCO">EI Conferences</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=4#ProgramCCO">Conference Keynotes</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=5#ProgramCCO">Short Courses</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=6#ProgramCCO">Demonstration & Poster Sessions</a></li> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/IST/Conferences/EI/EI2023/EI2023.aspx?EntryCCO=2#EntryCCO'">Program At-a-Glance</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=7#ProgramCCO">Author Index</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=5#ProgramCCO">Short Courses</a></li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=3#ProgramCCO" class="top_parent">Conferences</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=3#ProgramCCO">EI Conferences</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_3DMP.aspx">3D Imaging and Applications 2023 (3DIA)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_AVM.aspx">Autonomous Vehicles and Machines 2023 (AVM)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_COLOR.aspx">Color Imaging XXVIII: Displaying, Processing, Hardcopy, and Applications (COLOR)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_COIMG.aspx">Computational Imaging XXI (COIMG)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_CVAA.aspx">Computer Vision and Image Analysis of Art 2023 (CVAA)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_ERVR.aspx">Engineering Reality of Virtual Reality 2023 (ERVR)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_HPCI.aspx">High Performance Computing for Imaging 2023 (HPCI)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_HVEI.aspx">Human Vision and Electronic Imaging 2023 (HVEI)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IPAS.aspx">Image Processing: Algorithms and Systems XXI (IPAS)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IQSP.aspx">Image Quality and System Performance XX (IQSP)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IMAGE.aspx">Imaging and Multimedia Analytics at the Edge 2023 (IMAGE)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_ISS.aspx">Imaging Sensors and Systems 2023 (ISS)</a></li> 
<li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_IRIACV.aspx">Intelligent Robotics and Industrial Applications using Computer Vision 2023 (IRIACV)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_MLSI.aspx">Machine Learning for Scientific Imaging 2023 (MLSI)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_MWSF.aspx">Media Watermarking, Security, and Forensics 2023 (MWSF)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_MOBMU.aspx">Mobile Devices and Multimedia: Enabling Technologies, Algorithms, and Applications 2023 (MOBMU)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_SDA.aspx">Stereoscopic Displays and Applications XXXIV (SD&A)</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Conference/C_VDA.aspx">Visualization and Data Analysis 2023 (VDA)</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=2#ProgramCCO">Symposium Plenary Speakers</a></li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx" class="top_parent">Author/Submit</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx?Author_Info=1">Submit How-to</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx?Author_Info=2">Accepted: Next Steps</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Program.aspx?ProgramCCO=6#ProgramCCO">Demonstration Session</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/AuthorSubmit.aspx?Author_Info=3">Publication FAQ</a></li> <li><a href="https://www.imaging.org/PDFS/Conferences/ElectronicImaging/EI_InvitationLetterRequest_Form_Fillable.pdf">Visas and Letters of Invitation</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/Attend___Register/IST/Conferences/EI/EI2023/Attend.aspx" class="top_parent">Attend/Register</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx">Registration & Fees</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx?Attendee_Information=2#Attendee_Information">Logistics</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx?Attendee_Information=3#Attendee_Information">Why Attend</a></li> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/Attend.aspx?Attendee_Information=3#JustificationLetter.aspx">Justify Attendance</a> </li> <li><a href="https://www.imaging.org/PDFS/Conferences/ElectronicImaging/EI_InvitationLetterRequest_Form_Fillable.pdf">Visas and Letters of Invitation</a></li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/ExhibitSponsor.aspx" class="top_parent">Exhibit/Sponsor</a> <ul> <li><a href="https://www.imaging.org/IST/IST/Conferences/EI/EI2023/ExhibitSponsor.aspx">Exhibition & Sponsorship Opportunities</a> </li> </ul> </li> </ul> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/For_Students/IST/Conferences/EI/EI2023/For_Students.aspx" class="top_parent">For Students</a> <ul> <li><a href="https://www.imaging.org/IST/Conferences/EI/EI2023/For_Students/IST/Conferences/EI/EI2023/For_Students.aspx" class="top_parent">Student Focus</a></li> <li><a 
IMPORTANT DATES
Dates currently being confirmed; check back.
2022
Call for Papers Announced: 2 May
Journal-first (JIST/JPI) Submissions
∙ Submission site opens: 2 May
∙ Journal-first (JIST/JPI) submissions due: 1 Aug
∙ Final journal-first manuscripts due: 28 Oct
Conference Papers Submissions
∙ Abstract submission opens: 1 June
∙ Priority decision submission ends: 15 July
∙ Extended submission ends: 19 Sept
∙ FastTrack conference proceedings manuscripts due: 25 Dec
∙ All outstanding proceedings manuscripts due: 6 Feb 2023
Registration Opens: 1 Dec
Demonstration Applications Due: 19 Dec
Early Registration Ends: 18 Dec

2023
Hotel Reservation Deadline: 6 Jan
Symposium begins: 15 Jan

Sponsor
Meta (https://www.meta.com/)
Qualcomm (https://www.qualcomm.com/)
Human Vision and Electronic Imaging 2023

Monday 16 January 2023

10:20 – 10:50 AM Coffee Break
12:30 – 2:00 PM Lunch

Monday 16 January PLENARY: Neural Operators for Solving PDEs
Session Chair: Robin Jenkin, NVIDIA Corporation (United States)
2:00 – 3:00 PM
Cyril Magnin I/II/III

Deep learning surrogate models have shown promise in modeling complex physical phenomena such as fluid flows, molecular dynamics, and material properties. However, standard neural networks assume finite-dimensional inputs and outputs and hence cannot withstand a change in resolution or discretization between training and testing. We introduce Fourier neural operators, which learn operators, i.e., mappings between infinite-dimensional function spaces. They are independent of the resolution or grid of the training data and allow zero-shot generalization to higher-resolution evaluations. When applied to weather forecasting, neural operators capture fine-scale phenomena and show skill similar to gold-standard numerical weather models for predictions up to a week or longer, while being 4-5 orders of magnitude faster.

Anima Anandkumar, Bren Professor, California Institute of Technology, and Senior Director of AI Research, NVIDIA Corporation (United States)

Anima Anandkumar is a Bren Professor at Caltech and Senior Director of AI Research at NVIDIA. She is passionate about designing principled AI algorithms and applying them to interdisciplinary domains. She has received several honors, such as the IEEE Fellowship, the Alfred P. Sloan Fellowship, an NSF CAREER Award, and faculty fellowships from Microsoft, Google, Facebook, and Adobe. She is part of the World Economic Forum's Expert Network. Anandkumar received her BTech from the Indian Institute of Technology Madras and her PhD from Cornell University, did her postdoctoral research at MIT, and was an assistant professor at the University of California, Irvine.
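For readers unfamiliar with the neural-operator idea, the core building block of a Fourier neural operator is a spectral convolution: transform the input function to Fourier space, apply learned weights to a fixed number of low-frequency modes, and transform back. Because the weights act on frequency modes rather than grid points, the same layer can be evaluated at any sampling resolution. Below is a minimal illustrative sketch in Python/PyTorch; it is not the speaker's implementation, and the class and parameter names are invented for illustration.

    import torch
    import torch.nn as nn

    class SpectralConv1d(nn.Module):
        """Minimal 1D Fourier layer: FFT -> learned weights on low modes -> inverse FFT."""
        def __init__(self, channels: int, modes: int):
            super().__init__()
            self.modes = modes
            scale = 1.0 / (channels * channels)
            # complex weights mixing channels, one matrix per retained frequency mode
            self.weight = nn.Parameter(
                scale * torch.randn(channels, channels, modes, dtype=torch.cfloat))

        def forward(self, x):                        # x: (batch, channels, n_points)
            x_ft = torch.fft.rfft(x)                 # to Fourier space
            out_ft = torch.zeros_like(x_ft)
            # channel mixing on the lowest `modes` frequencies only
            out_ft[..., :self.modes] = torch.einsum(
                "bim,iom->bom", x_ft[..., :self.modes], self.weight)
            return torch.fft.irfft(out_ft, n=x.size(-1))   # back to physical space

    # The same weights accept inputs sampled on coarse or fine grids,
    # which is the discretization-invariance property mentioned in the abstract.
    layer = SpectralConv1d(channels=4, modes=8)
    coarse = layer(torch.randn(2, 4, 64))     # output shape (2, 4, 64)
    fine = layer(torch.randn(2, 4, 256))      # output shape (2, 4, 256)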
3:00 – 3:30 PM Coffee Break

EI 2023 Highlights Session
Session Chair: Robin Jenkin, NVIDIA Corporation (United States)
3:30 – 5:00 PM
Cyril Magnin II

Join us for a session that celebrates the breadth of what EI has to offer, with short papers selected from across the EI conferences.
NOTE: The EI-wide "EI 2023 Highlights" session is concurrent with the Monday afternoon COIMG, COLOR, IMAGE, and IQSP conference sessions.

IQSP-309: Evaluation of image quality metrics designed for DRI tasks with automotive cameras, Valentine Klein, Yiqi LI, Claudio Greco, Laurent Chanas, and Frédéric Guichard, DXOMARK (France)
Driving assistance is increasingly used in new car models. Most driving assistance systems are based on automotive cameras and computer vision. Computer vision, regardless of the underlying algorithms and technology, requires the images to have good image quality, defined according to the task. This notion of good image quality is still to be defined for computer vision, since its criteria differ from those of human vision: humans have a better contrast detection ability than imaging chains. The aim of this article is to compare three metrics designed for the detection of objects with computer vision: the Contrast Detection Probability (CDP) [1, 2, 3, 4], the Contrast Signal to Noise Ratio (CSNR) [5], and the Frequency of Correct Resolution (FCR) [6]. For this purpose, the computer vision task of reading the characters on a license plate is used as a benchmark. The objective is to check the correlation between each objective metric and the ability of a neural network to perform this task. A protocol to test these metrics and compare them to the output of the neural network has been designed, and the pros and cons of each of the three metrics are noted.

SD&A-224: Human performance using stereo 3D in a helmet mounted display and association with individual stereo acuity, Bonnie Posselt, RAF Centre of Aviation Medicine (United Kingdom)
Binocular Helmet Mounted Displays (HMDs) are a critical part of the aircraft system, allowing information to be presented to the aviator with stereoscopic 3D (S3D) depth, potentially enhancing situational awareness and improving performance. The utility of S3D in an HMD may be linked to an individual's ability to perceive changes in binocular disparity (stereo acuity). Though minimum stereo acuity standards exist for most military aviators, current test methods may be unable to characterise this relationship. This presentation investigates the effect of S3D on performance when used in a warning alert displayed in an HMD. Furthermore, any effect on performance, ocular symptoms, and cognitive workload is evaluated with regard to individual stereo acuity measured with a variety of paper-based and digital stereo tests.

IMAGE-281: Smartphone-enabled point-of-care blood hemoglobin testing with color accuracy-assisted spectral learning, Sang Mok Park¹, Yuhyun Ji¹, Semin Kwon¹, Andrew R. O'Brien², Ying Wang², and Young L. Kim¹; ¹Purdue University and ²Indiana University School of Medicine (United States)
We develop an mHealth technology for noninvasively measuring blood hemoglobin (Hgb) levels in patients with sickle cell anemia, using photos of peripheral tissue acquired by the built-in camera of a smartphone. As an easily accessible sensing site, the inner eyelid (i.e., palpebral conjunctiva) is used because of its relatively uniform microvasculature and the absence of skin pigments. Color correction (color reproduction) and spectral learning (spectral super-resolution spectroscopy) algorithms are integrated for accurate and precise mHealth blood Hgb testing. First, color correction using a color reference chart with multiple color patches extracts absolute color information of the inner eyelid, compensating for smartphone models, ambient light conditions, and data formats during photo acquisition. Second, spectral learning virtually transforms the smartphone camera into a hyperspectral imaging system, mathematically reconstructing high-resolution spectra from color-corrected eyelid images. Third, the color correction and spectral learning algorithms are combined with a spectroscopic model for blood Hgb quantification among sickle cell patients. Importantly, single-shot photo acquisition of the inner eyelid using the color reference chart allows straightforward, real-time, and instantaneous reading of blood Hgb levels. Overall, our mHealth blood Hgb tests could potentially be scalable, robust, and sustainable in resource-limited and homecare settings.
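The color-correction step described in IMAGE-281 is, at its core, a regression from device RGB values to known reference patch values. One common approach is a least-squares affine color-correction matrix; the sketch below is a generic illustration under assumed inputs (a chart of N measured patches with known reference colors), not the authors' algorithm.

    import numpy as np

    def fit_color_correction(measured_rgb: np.ndarray, reference_rgb: np.ndarray) -> np.ndarray:
        """Fit an affine color-correction matrix M so that reference ≈ [RGB, 1] @ M.
        measured_rgb, reference_rgb: (N, 3) arrays of chart patch colors."""
        ones = np.ones((measured_rgb.shape[0], 1))
        A = np.hstack([measured_rgb, ones])                     # (N, 4) design matrix
        M, *_ = np.linalg.lstsq(A, reference_rgb, rcond=None)   # (4, 3) solution
        return M

    def apply_color_correction(image_rgb: np.ndarray, M: np.ndarray) -> np.ndarray:
        """Apply the fitted matrix to an (H, W, 3) image."""
        h, w, _ = image_rgb.shape
        flat = image_rgb.reshape(-1, 3)
        flat = np.hstack([flat, np.ones((flat.shape[0], 1))]) @ M
        return flat.reshape(h, w, 3)

Fitting the matrix per photo (against the chart placed in the scene) is what lets such a pipeline compensate for different phone models and ambient lighting before any spectral modeling.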
AVM-118: Designing scenes to quantify the performance of automotive perception systems, Zhenyi Liu¹, Devesh Shah², Alireza Rahimpour², Joyce Farrell¹, and Brian Wandell¹; ¹Stanford University and ²Ford Motor Company (United States)
We implemented an end-to-end simulation of camera-based perception systems used in automotive applications. The open-source software creates complex driving scenes and simulates cameras that acquire images of these scenes. The camera images are then used by a neural network in the perception system to identify the locations of scene objects, providing the results as input to the decision system. In this paper, we design collections of test scenes that can be used to quantify the perception system's performance under a range of (a) environmental conditions (object distance, occlusion ratio, lighting levels) and (b) camera parameters (pixel size, lens type, color filter array). We are designing scene collections to analyze performance for detecting vehicles, traffic signs, and vulnerable road users across a range of environmental conditions and camera parameters. With experience, such scene collections may serve a role similar to that of the standardized test targets used to quantify camera image quality (e.g., acuity, color).

VDA-403: Visualizing and monitoring the process of injection molding, Christian A. Steinparz¹, Thomas Mitterlehner², Bernhard Praher², Klaus Straka¹,², Holger Stitz¹,³, and Marc Streit¹,³; ¹Johannes Kepler University, ²Moldsonics GmbH, and ³datavisyn GmbH (Austria)
In injection molding machines, the molds are rarely equipped with sensor systems. The availability of non-invasive, ultrasound-based in-mold sensors provides better means for guiding operators of injection molding machines throughout the production process. However, existing visualizations are mostly limited to plots of temperature and pressure over time. In this work, we present the result of a design study created in collaboration with domain experts. The resulting prototypical application uses real-world data taken from live ultrasound sensor measurements of injection molding cavities captured over multiple cycles during the injection process. Our contribution includes a definition of tasks for setting up and monitoring the machines during the process, and a corresponding web-based visual analysis tool addressing these tasks. The interface consists of a multi-view display with various levels of data aggregation that is updated live as new data from ongoing injection cycles is streamed.

COIMG-155: Commissioning the James Webb Space Telescope, Joseph M. Howard, NASA Goddard Space Flight Center (United States)
Astronomy is arguably in a golden age, and current and future NASA space telescopes are expected to contribute to this rapid growth in our understanding of the universe. The most recent addition to our space-based telescopes dedicated to astronomy and astrophysics is the James Webb Space Telescope (JWST), which launched on 25 December 2021. This talk discusses the first six months in space for JWST, which were spent commissioning the observatory with many deployments, alignments, and system and instrumentation checks. These engineering activities help verify the proper working of the telescope prior to commencing full science operations. (For the session: Computational Imaging using Fourier Ptychography and Phase Retrieval.)

HVEI-223: Critical flicker frequency (CFF) at high luminance levels, Alexandre Chapiro¹, Nathan Matsuda¹, Maliha Ashraf², and Rafal Mantiuk³; ¹Meta (United States), ²University of Liverpool (United Kingdom), and ³University of Cambridge (United Kingdom)
The critical flicker fusion (CFF) is the frequency of changes at which a temporally periodic light begins to appear completely steady to an observer. This value is affected by several visual factors, such as the luminance of the stimulus or its location on the retina. With new high dynamic range (HDR) displays operating at higher luminance levels, and virtual reality (VR) displays presenting at wide fields of view, the effective CFF may change significantly from the values expected for traditional presentation. In this work we use a prototype HDR VR display capable of luminances up to 20,000 cd/m^2 to gather a novel set of CFF measurements at never-before-examined levels of luminance, eccentricity, and size. Our data are useful for studying the temporal behavior of the visual system at high luminance levels, as well as for setting useful thresholds for display engineering.
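As background for the luminance dependence studied in HVEI-223: over a wide operating range, CFF is classically approximated by the Ferry–Porter law, which is linear in log luminance. The snippet below is purely illustrative; the coefficients are placeholder values chosen for plausibility, not measurements or fits from this paper.

    import math

    def cff_ferry_porter(luminance_cd_m2: float, a: float = 30.0, b: float = 12.5) -> float:
        """Ferry–Porter approximation: CFF ≈ a + b * log10(L).
        a and b are illustrative placeholders, not values from HVEI-223."""
        return a + b * math.log10(luminance_cd_m2)

    # Rough illustration of why very bright HDR/VR displays push the CFF upward:
    for L in (100, 1_000, 20_000):                  # luminance in cd/m^2
        print(f"{L:>6} cd/m^2 -> ~{cff_ferry_porter(L):.0f} Hz")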
HPCI-228: Physics guided machine learning for image-based material decomposition of tissues from simulated breast models with calcifications, Muralikrishnan Gopalakrishnan Meena¹, Amir K. Ziabari¹, Singanallur Venkatakrishnan¹, Isaac R. Lyngaas¹, Matthew R. Norman¹, Balint Joo¹, Thomas L. Beck¹, Charles A. Bouman², Anuj Kapadia¹, and Xiao Wang¹; ¹Oak Ridge National Laboratory and ²Purdue University (United States)
Material decomposition of Computed Tomography (CT) scans using projection-based approaches, while highly accurate, poses a challenge for medical imaging researchers and clinicians due to limited or no access to projection data. We introduce a deep learning image-based material decomposition method guided by physics and requiring no access to projection data. The method is demonstrated by decomposing tissues from simulated dual-energy X-ray CT scans of virtual human phantoms containing four materials: adipose, fibroglandular, calcification, and air. The method uses a hybrid unsupervised and supervised learning technique to tackle the material decomposition problem. We take advantage of the unique X-ray absorption rate of calcium compared to body tissues to perform a preliminary segmentation of calcification from the images using unsupervised learning. We then perform supervised material decomposition using a deep-learned UNET model trained on GPUs in the high-performance systems at the Oak Ridge Leadership Computing Facility. The method is demonstrated on simulated breast models to decompose calcification, adipose, fibroglandular, and air.

3DIA-104: Layered view synthesis for general images, Loïc Dehan, Wiebe Van Ranst, and Patrick Vandewalle, Katholieke University Leuven (Belgium)
We describe a novel method for monocular view synthesis. The goal of our work is to create a visually pleasing set of horizontally spaced views based on a single image. This can be applied to view synthesis for virtual reality and glasses-free 3D displays. Previous methods produce realistic results on images that show a clear distinction between a foreground object and the background. We aim to create novel views in more general, crowded scenes in which there is no such clear distinction. Our main contributions are a computationally efficient method for realistic occlusion inpainting and blending, especially in complex scenes. Our method can be effectively applied to any image, which is shown both qualitatively and quantitatively on a large dataset of stereo images. Our method performs natural disocclusion inpainting and maintains the shape and edge quality of foreground objects.

ISS-329: A self-powered asynchronous image sensor with independent in-pixel harvesting and sensing operations, Ruben Gomez-Merchan, Juan Antonio Leñero-Bardallo, and Ángel Rodríguez-Vázquez, University of Seville (Spain)
A new self-powered asynchronous sensor with a novel pixel architecture is presented. Pixels are autonomous and can harvest energy or sense illumination independently. During image acquisition, pixels toggle to a harvesting operation mode once they have sensed their local illumination level. With the proposed pixel architecture, the most illuminated pixels provide an early contribution to powering the sensor, while dimly illuminated ones spend more time sensing their local illumination. Thus, the equivalent frame rate is higher than that offered by conventional self-powered sensors that harvest and sense illumination in independent phases. The proposed sensor uses a Time-to-First-Spike readout that allows trading off image quality against data and bandwidth consumption. The sensor has HDR operation with a dynamic range of 80 dB. Pixel power consumption is only 70 pW. In the article, we describe the sensor and pixel architectures in detail. Experimental results are provided and discussed. Sensor specifications are benchmarked against the state of the art.
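For readers unfamiliar with Time-to-First-Spike (TTFS) readout as used in ISS-329: each pixel integrates photocurrent until a threshold is crossed, so brighter pixels fire earlier and can then be reported (and, in this design, switch to harvesting) while dimmer pixels are still integrating. A minimal idealized model is sketched below; it is illustrative only, with invented parameter names, and does not describe the actual circuit behaviour of the reported sensor.

    import numpy as np

    def ttfs_readout(illuminance, q_threshold: float = 1.0, t_max: float = 1.0):
        """Ideal time-to-first-spike model: spike time ≈ threshold / photocurrent.
        Pixels that never reach threshold within t_max are clipped (darkest pixels)."""
        illuminance = np.asarray(illuminance, dtype=float)
        t_spike = np.divide(q_threshold, illuminance,
                            out=np.full_like(illuminance, np.inf),
                            where=illuminance > 0)
        return np.minimum(t_spike, t_max)

    # Brightest pixel spikes first; dim and dark pixels are clipped at t_max.
    print(ttfs_readout([10.0, 1.0, 0.1, 0.0]))

Reading pixels in spike order is also what allows the quality/bandwidth trade-off mentioned in the abstract: stopping the readout early keeps the brightest (earliest) pixels and drops the rest.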
The gender balance among board game players is approaching equality, although nowadays the male component is still a slight majority. This means that (at least) around 10% of board game players are color blind. How does the board game industry deal with this? Recently, awareness has started to rise in board game design, but so far there is a big gap compared with (e.g.) the computer game industry. This paper presents some data about the current situation, discussing exemplary cases of successful board games.</p> <p> </p> <script> /* Toggle the visibility of the abstract paragraph associated with the clicked element. */ function toggle_me() { var elm = event.srcElement; var final_id = elm.getAttribute("final_id"); var the_id = "abstract-" + final_id; var x = document.getElementById(the_id); if (x.style.display === "none") { x.style.display = "block"; } else { x.style.display = "none"; } }</script> </div> <br> <span> </span> <p class="event_time">5:00 – 6:15 PM EI 2023 All-Conference Welcome Reception (in the Cyril Magnin Foyer)</p> <p class="date">Tuesday 17 January 2023</p> <span> </span> <div class="callout"> <p class="session_title">KEYNOTE: Perceptual Video Quality 1 (T1)<img alt="" class="flag_image" src="http://www.imaging.org/images/IST_Images/Conferences/EI/Joint-Session.png" style="vertical-align: middle; margin-left:1em;"></p> <span class="chair">Session Chairs: Lukáš Krasula, Netflix, Inc. (United States) and Mohamed Chaker Larabi, Université de Poitiers (France)<br> </span><span class="session_time">9:05 – 10:10 AM</span> <br> <span class="room">Cyril Magnin III </span> <br> <span></span> <p class="session_notes">This session is jointly sponsored by: Human Vision and Electronic Imaging 2023, and Image Quality and System Performance XX. </p> <br> <p class="presentation_time" style="text-align:left;"> <br> <span class="presentation_title">Joint Conference Welcome</span> </p> <p class="presentation_time" style="text-align:left;"> <a name="HVEI-258"></a><a name="HVEI-258"></a><span style="float: right;">HVEI-258</span> <br> <span class="presentation_title" final_id="HVEI-258" onclick="toggle_me()" style="cursor: pointer;">KEYNOTE: Bringing joy to Netflix members through perceptual encoding optimization, </span><span class="author_string" final_id="HVEI-258" onclick="toggle_me()" style="cursor: pointer;">Anne Aaron</span><span class="author_string" final_id="HVEI-258" onclick="toggle_me()" style="cursor: pointer;">, Netflix, Inc. (United States)</span><span class="abstract_link" final_id="HVEI-258" onclick="toggle_me()"> [view abstract] </span></p> <p class="session_notes"> </p> <p class="session_notes">As Director of Encoding Technologies, Anne Aaron leads the team responsible for media processing and encoding at Netflix. Her team works on video, audio, images and timed-text, from analysis to processing, encoding, packaging and DRM. On the streaming side, they strive to deliver a compelling viewing experience for millions of Netflix members worldwide, no matter where, how and what they watch. For the Netflix studio, they build media technologies that can improve content production. In her previous role at Netflix, Aaron led the Video Algorithms team. As a team, they researched and deployed innovation in the video encoding space (per-title encoding, video quality assessment and perceptual metrics, shot-based encoding, HDR, next-generation codecs) that benefited Netflix members and impacted the rest of the industry. 
Recent recognitions include the SMPTE 2019 Workflow Systems Medal, Forbes' 2018 America's Top Women in Tech, and Business Insider's 2017 Most Powerful Female Engineers in US Tech.</p> <p class="abstract" final_id="HVEI-258" id="abstract-HVEI-258" onclick="toggle_me()" style="display:none; cursor:pointer;">Audio and video compression are immensely important to Netflix, as well as to internet service providers (ISPs). It has been estimated that our codec optimization efforts, together with the Open Connect program, saved ISPs over 1 billion dollars in 2021 alone. The keynote will discuss the importance of perceptual models and optimization for delivering hits such as Stranger Things, Squid Game, and Red Notice in the highest quality while being mindful of internet traffic. It will cover recent advances in audio and video encoding, innovations in the subjective and objective assessment of quality, as well as immediate and future challenges in this area.</p> <p> </p> </div> <br> <p class="event_time">10:00 AM – 7:30 PM Industry Exhibition - Tuesday (in the Cyril Magnin Foyer)</p> <p class="event_time">10:20 – 10:50 AM Coffee Break</p> <br> <br> <p class="session_title">Perceptual Video Quality 2 (T2)<img alt="" class="flag_image" src="http://www.imaging.org/images/IST_Images/Conferences/EI/Joint-Session.png" style="vertical-align: middle; margin-left:1em;"></p> <span class="chair_label">Session Chairs: </span> <span class="chair">Lukáš Krasula, Netflix, Inc. (United States) and Mohamed Chaker Larabi, Université de Poitiers (France)<br> </span> <span class="session_time">10:50 AM – 12:30 PM</span> <br> <span class="room">Cyril Magnin III </span> <br> <span></span> <p class="session_notes">This session is jointly sponsored by: Human Vision and Electronic Imaging 2023, and Image Quality and System Performance XX.</p> <br> <p class="presentation_time" style="text-align:left;">10:50<a name="HVEI-259"></a><a name="HVEI-259"></a><span style="float: right;">HVEI-259</span> <br> <span class="presentation_title" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Video quality of video professionals for Video Assisted Referee (VAR), </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Kjell Brunnström<sup>1,</sup><sup>2</sup>, </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Anders Djupsjöbacka<sup>1</sup>, </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Johsan Billingham<sup>3</sup>, </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Katharina Wistel<sup>3</sup>, </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Börje Andrén<sup>1</sup>, </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Oskars Ozolins<sup>1,</sup><sup>4</sup>, and </span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">Nicolas Evans<sup>3</sup></span><span class="author_string" final_id="HVEI-259" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>RISE Research Institutes of Sweden AB (Sweden), <sup>2</sup>Mid Sweden University (Sweden), <sup>3</sup>Fédération Internationale de Football Association (FIFA) (Switzerland), and <sup>4</sup>KTH (Royal Institute of Technology) (Sweden)</span><span class="abstract_link" 
final_id="HVEI-259" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-259" id="abstract-HVEI-259" onclick="toggle_me()" style="display:none; cursor:pointer;">Changes in the footballing world’s approach to technology and innovation contributed to the decision by the International Football Association Board (IFAB) to introduce Video Assistant Referees (VAR). The change meant that under strict protocols referees could use video replays to review decisions in the event of a “clear and obvious error” or a “serious missed incident”. This led to the need by Fédération Internationale de Football Association (FIFA) to develop methods for quality control of the VAR-systems, which was done in collaboration with RISE Research Institutes of Sweden AB. One of the important aspects is the video quality. The novelty of this study is that it has performed a user study specifically targeting video experts i.e., to measure the perceived quality of video professionals working with video production as their main occupation. An experiment was performed involving 25 video experts. In addition, six video quality models have been benchmarked against the user data and evaluated to show which of the models could provide the best predictions of perceived quality for this application. Video Quality Metric for variable frame delay (VQM_VFD) had the best performance for both formats, followed by Video Multimethod Assessment Fusion (VMAF) and VQM General model.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:10<a name="HVEI-260"></a><a name="HVEI-260"></a><span style="float: right;">HVEI-260</span> <br> <span class="presentation_title" final_id="HVEI-260" onclick="toggle_me()" style="cursor: pointer;">User perception for dynamic video resolution change using VVC, </span><span class="author_string" final_id="HVEI-260" onclick="toggle_me()" style="cursor: pointer;">Sachin G. Deshpande and </span><span class="author_string" final_id="HVEI-260" onclick="toggle_me()" style="cursor: pointer;">Philip Cowan</span><span class="author_string" final_id="HVEI-260" onclick="toggle_me()" style="cursor: pointer;">, Sharp (United States)</span><span class="abstract_link" final_id="HVEI-260" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-260" id="abstract-HVEI-260" onclick="toggle_me()" style="display:none; cursor:pointer;">We define experiments that measure user perception when video resolution changes dynamically. Versatile Video Coding (VVC) standard was recently finalized and it includes a reference picture resampling (RPR) tool. VVC RPR supports changing spatial resolution in a coded video sequence on a per picture basis. VVC RPR defines the downsampling and upsampling filters to be used when changing resolution. This paper provides results from subjective evaluation when VVC RPR is used for part of the video sequence to dynamically change resolution. The experiments use different QP values (or bitrates), different RPR scale factors and different highest original spatial resolutions. The results compare how users perceive video coded using VVC RPR for some pictures compared to an anchor which does not use RPR. In addition to the subjective results, we also describe performance of various metrics including PSNR, VMAF and MS-SSIM. Our results can help choose the highest RPR scale factor that can be used to achieve/ maintain certain perceived quality when using RPR (for example for bitrate reduction). 
The study also confirms that MS-SSIM and VMAF match subjective test results more closely compared to PSNR.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:30<a name="IQSP-261"></a><a name="IQSP-261"></a><span style="float: right;">IQSP-261</span> <br> <span class="presentation_title" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">Proposing more ecologically-valid experiment protocol using YouTube platform, </span><span class="author_string" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">Gabriela Wielgus, </span><span class="author_string" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">Lucjan Janowski, </span><span class="author_string" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">Kamil Koniuch, </span><span class="author_string" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">Mikolaj Leszczuk, and </span><span class="author_string" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">Rafal Figlus</span><span class="author_string" final_id="IQSP-261" onclick="toggle_me()" style="cursor: pointer;">, AGH University of Science and Technology (Poland)</span><span class="abstract_link" final_id="IQSP-261" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="IQSP-261" id="abstract-IQSP-261" onclick="toggle_me()" style="display:none; cursor:pointer;">Video streaming is becoming increasingly popular, and with platforms like YouTube, users do not watch the video passively but seek, pause, and read the comments. The popularity of video services is possible due to the development of compression and quality prediction algorithms. However, those algorithms are developed based on classic experiments, which are non-ecologically valid. Therefore, classic experiments do not mimic real user interaction. Further development of the quality and compression algorithms depends on the results coming from ecologically-valid experiments. Therefore, we aim to propose such experiments. Nevertheless, proposing a new experimental protocol is difficult, especially when there is no limitation on content selection and control of the video. The freedom makes data analysis more challenging. In this paper, we present an ecologically-valid experimental protocol in which the subject assessed the quality while freely using YouTube. To achieve this goal, we developed a Chrome extension that collects objective data and allows network manipulation. Our deep data analysis shows a correlation between MOS and objectively measured results such as resolution, which proves that the ecologically-valid test works. 
Moreover, we have shown significant differences between subjects, allowing for a more detailed understanding of how quality influences the interaction with the service.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:50<a name="IQSP-262"></a><a name="IQSP-262"></a><span style="float: right;">IQSP-262</span> <br> <span class="presentation_title" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">Evaluation of motion blur image quality in video frame interpolation, </span><span class="author_string" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">Hai Dinh, </span><span class="author_string" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">Fangwen Tu, </span><span class="author_string" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">Qinyi Wang, </span><span class="author_string" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">Brett Frymire, and </span><span class="author_string" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">Bo Mu</span><span class="author_string" final_id="IQSP-262" onclick="toggle_me()" style="cursor: pointer;">, Omnivision Technology (United States)</span><span class="abstract_link" final_id="IQSP-262" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="IQSP-262" id="abstract-IQSP-262" onclick="toggle_me()" style="display:none; cursor:pointer;">While slow motion has become a standard feature in mainstream cell phones, a fast approach for assessing slow-motion video quality that does not rely on specific training datasets is not available. Conventionally, researchers evaluate their algorithms with peak signal-to-noise ratio (PSNR) or structural similarity index measure (SSIM) between ground-truth and reconstructed frames. But both are global evaluation indices and are more sensitive to noise or distortion introduced by the interpolation. For video interpolation, especially for fast-moving objects, motion blur and ghosting artifacts are more critical to the audience’s subjective judgment. 
How to achieve a proper evaluation for the Video Frame Interpolation (VFI) task is still a problem that is not well addressed.</p> <p> </p> <p class="presentation_time" style="text-align:left;">12:10<a name="IQSP-263"></a><a name="IQSP-263"></a><span style="float: right;">IQSP-263</span> <br> <span class="presentation_title" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Subjective video quality for 4K HDR-WCG content using a browser-based approach for “at-home” testing, </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Lukáš Krasula<sup>1</sup>, </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Anustup Choudhury<sup>2</sup>, </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Scott Daly<sup>2</sup>, </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Zhi Li<sup>1</sup>, </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Robin Atkins<sup>2</sup>, </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Ludovic Malfait<sup>2</sup>, and </span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">Aditya Mavlankar<sup>1</sup></span><span class="author_string" final_id="IQSP-263" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>Netflix, Inc. and <sup>2</sup>Dolby Laboratories, Inc. (United States)</span><span class="abstract_link" final_id="IQSP-263" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="IQSP-263" id="abstract-IQSP-263" onclick="toggle_me()" style="display:none; cursor:pointer;">A subjective quality study of 4K HDR-WCG (3840 x 2160, High Dynamic Range, Wide Color Gamut) video content was performed in an at-home scenario. There are no available datasets on such content, yet they are crucial for the development and testing of objective quality metrics. While at-home testing generally implies lack of calibration, we sought to maximize calibration by limiting the displays to a specific model of TV that we have calibrated in our lab and have found that unit-to-unit deviations are small. Moreover, we performed the experiment in the Dolby Vision mode (where the various enhancements of the TV are turned OFF by default). In addition, we asked subjects to go through procedures to ensure a standard viewing distance of 1.6 picture heights, and to eliminate ambient lighting effects on display contrast by viewing in dark or dim conditions. A browser-based approach was used that took control of the TV and ensured the content was viewed at the native resolution of the TV (i.e., dot-on-dot mode). Particular care was given to content selection to probe specific challenge cases of the display behavior as well as human vision (e.g., complex motion effects on eye tracking). Further, several clips were selected that represent the highest quality possible with 2021 technology. We found that the subject response variability was similar to that of lab-based experiments, suggesting that the noise in the results due to display variability and lack of unit-to-unit calibration was less than the within-subject variability due to personal physiology or preferences. 
Several statistical models and subject-rejection strategies will be compared and the usefulness of the data for objective metrics will be presented.</p> <p> </p> <br> <br> <p class="event_time">12:30 – 2:00 PM Lunch</p> <div class="pinkcallout"> <p class="session_title">Tuesday 17 January PLENARY: Embedded Gain Maps for Adaptive Display of High Dynamic Range Images</p> <span class="chair">Session Chair: Robin Jenkin, NVIDIA Corporation (United States)<br> </span> <span class="session_time">2:00 PM – 3:00 PM</span> <br> <span class="room">Cyril Magnin I/II/III<br> </span> <span></span> <p class="session_notes">Images optimized for High Dynamic Range (HDR) displays have brighter highlights and more detailed shadows, resulting in an increased sense of realism and greater impact. However, a major issue with HDR content is the lack of consistency in appearance across different devices and viewing environments. There are several reasons, including varying capabilities of HDR displays and the different tone mapping methods implemented across software and platforms. Consequently, HDR content authors can neither control nor predict how their images will appear in other apps.</p> <span></span> <p class="session_notes">We present a flexible system that provides consistent and adaptive display of HDR images. Conceptually, the method combines both SDR and HDR renditions within a single image and interpolates between the two dynamically at display time. We compute a Gain Map that represents the difference between the two renditions. In the file, we store a Base rendition (either SDR or HDR), the Gain Map, and some associated metadata. At display time, we combine the Base image with a scaled version of the Gain Map, where the scale factor depends on the image metadata, the HDR capacity of the display, and the viewing environment. </p> <br> <span></span> <p class="session_notes"> </p> <span class="author_string"><strong>Eric Chan, </strong>Fellow, Adobe Inc. (United States)<span class="author_string"></span></span> <p> </p> <span></span> <p class="session_notes">Eric Chan is a Fellow at Adobe, where he develops software for editing photographs. Current projects include Photoshop, Lightroom, Camera Raw, and Digital Negative (DNG). When not writing software, Chan enjoys spending time at his other keyboard, the piano. He is an enthusiastic nature photographer and often combines his photo activities with travel and hiking.</p> <br> <span class="author_string"><strong>Paul M. Hubel, </strong>director of Image Quality in Software Engineering, Apple Inc. (United States)<span class="author_string"></span></span> <p> </p> <span></span> <p class="session_notes">Paul M. Hubel is director of Image Quality in Software Engineering at Apple. He has worked on computational photography and image quality of photographic systems for many years on all aspects of the imaging chain, particularly for iPhone. He trained in optical engineering at University of Rochester, Oxford University, and MIT, and has more than 50 patents on color imaging and camera technology. Hubel is active on the ISO-TC42 committee Digital Photography, where this work is under discussion, and is currently a VP on the IS&T Board. 
Outside work he enjoys photography, travel, cycling, coffee roasting, and plays trumpet in several bay area ensembles.</p> </div> <br> <p class="event_time">3:00 – 3:30 PM Coffee Break</p> <p class="session_title">Computational Models of Vision (T3)</p> <span class="chair_label">Session Chair: </span> <span class="chair">Rafal Mantiuk, University of Cambridge (United Kingdom)<br> </span> <span class="session_time">3:30 – 4:50 PM</span> <br> <span class="room">Cyril Magnin I<br> </span> <p class="presentation_time" style="text-align:left;">3:30<a name="HVEI-246"></a><span style="float: right;">HVEI-246</span> <br> <span class="presentation_title" final_id="HVEI-246" onclick="toggle_me()" style="cursor: pointer;">Modelling contrast sensitivity of discs, </span><span class="author_string" final_id="HVEI-246" onclick="toggle_me()" style="cursor: pointer;">Maliha Ashraf<sup>1</sup>, </span><span class="author_string" final_id="HVEI-246" onclick="toggle_me()" style="cursor: pointer;">Rafal Mantiuk<sup>2</sup>, and </span><span class="author_string" final_id="HVEI-246" onclick="toggle_me()" style="cursor: pointer;">Alexandre Chapiro<sup>3</sup></span><span class="author_string" final_id="HVEI-246" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>University of Liverpool (United Kingdom), <sup>2</sup>University of Cambridge (United Kingdom), and <sup>3</sup>Meta (United States)</span><span class="abstract_link" final_id="HVEI-246" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-246" id="abstract-HVEI-246" onclick="toggle_me()" style="display:none; cursor:pointer;">Spatial and temporal contrast sensitivity is typically measured using different stimuli. Gabor patterns are used to measure spatial contrast sensitivity and flickering discs are used for temporal contrast sensitivity. The data from both types of studies is difficult to compare as there is no well-established relationship between the sensitivity to disc and Gabor patterns. The goal of this work is to propose a model that can predict the contrast sensitivity of a disc using the more commonly available data and models for Gabors. To that end, we measured the contrast sensitivity for discs of different sizes, shown at different luminance levels, and for both achromatic and chromatic (isoluminant) contrast. We used this data to compare 6 different models, each of which tested a different hypothesis on the detection and integration mechanisms of disc contrast. The results indicate that multiple detectors contribute to the perception of disc stimuli, and each can be modelled either using an energy model, or the peak spatial frequency of the contrast sensitivity function.</p> <p> </p> <p class="presentation_time" style="text-align:left;">3:50<a name="HVEI-247"></a><span style="float: right;">HVEI-247</span> <br> <span class="presentation_title" final_id="HVEI-247" onclick="toggle_me()" style="cursor: pointer;">An intrinsic image network evaluated as a model of human lightness perception, </span><span class="author_string" final_id="HVEI-247" onclick="toggle_me()" style="cursor: pointer;">Richard F. Murray<sup>1</sup>, </span><span class="author_string" final_id="HVEI-247" onclick="toggle_me()" style="cursor: pointer;">David H. Brainard<sup>2</sup>, </span><span class="author_string" final_id="HVEI-247" onclick="toggle_me()" style="cursor: pointer;">Alban Flachot<sup>1</sup>, and </span><span class="author_string" final_id="HVEI-247" onclick="toggle_me()" style="cursor: pointer;">Jaykishan Y. 
Patel<sup>1</sup></span><span class="author_string" final_id="HVEI-247" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>York University (Canada) and <sup>2</sup>University of Pennsylvania (United States)</span><span class="abstract_link" final_id="HVEI-247" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-247" id="abstract-HVEI-247" onclick="toggle_me()" style="display:none; cursor:pointer;">We evaluate a recent artificial neural network architecture (InverseRenderNet) in a lightness matching task. We use supervised learning to train the network to map luminance to albedo, using 100,000 images of scenes of cluttered geometric objects, rendered in Blender. Using Thouless ratios to quantify lightness constancy, we find that the network has human-like levels of partial constancy (Thouless ratios around 0.70). Also like human observers, the network's log reflectance matches are a linear function of log illuminance. To provide context, we evaluate three other current computational models of lightness/brightness in the same tasks (ODOG, Dakin-Bex, and retinex). All three models show much lower levels of lightness constancy (Thouless ratios around 0.10), and largely match luminance instead of albedo. Thus we find interesting similarities between InverseRenderNet's behaviour and human lightness perception, and advantages over competing computational models. We discuss potential obstacles and future directions for using neural networks as models of human lightness perception.</p> <p> </p> <p class="presentation_time" style="text-align:left;">4:10<a name="HVEI-248"></a><span style="float: right;">HVEI-248</span> <br> <span class="presentation_title" final_id="HVEI-248" onclick="toggle_me()" style="cursor: pointer;">Are unique hues defined by complementary color pairings rather than opponent processes?, </span><span class="author_string" final_id="HVEI-248" onclick="toggle_me()" style="cursor: pointer;">Christopher W. Tyler</span><span class="author_string" final_id="HVEI-248" onclick="toggle_me()" style="cursor: pointer;">, The Smith-Kettlewell Eye Research Institute (United States)</span><span class="abstract_link" final_id="HVEI-248" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-248" id="abstract-HVEI-248" onclick="toggle_me()" style="display:none; cursor:pointer;">The current consensus is that there are four unique hues, red, green, blue, and yellow, and that their opponent pairings define the metric of color space. This construct, however, has the problem that the neutral point of the R/G opponency is yellow while that for B/Y opponency is white, making them a complementary pair. The complementary pairings for the red and green extrema in CIE (linear) color space are cyan and magenta, respectively, as confirmed by color afterimage settings. The current extended gamut of visual displays makes it clear that these latter two colors have the distinctness properties of unique hues to about the same degree as yellow. It is therefore proposed that these two colors join yellow as three secondary unique hues, with the three extremes of the CIE color space (red, green, and blue) defining the primary unique hues of which they are the complements. 
Thus, this revised scheme recognizes six unique hues, corresponding to a meld of the RGB and CMYK color primaries.</p> <p> </p> <p class="presentation_time" style="text-align:left;">4:30<a name="HVEI-249"></a><span style="float: right;">HVEI-249</span> <br> <span class="presentation_title" final_id="HVEI-249" onclick="toggle_me()" style="cursor: pointer;">Natural scene statistics and distance perception: ground surface and non-ground objects (JPI-first), </span><span class="author_string" final_id="HVEI-249" onclick="toggle_me()" style="cursor: pointer;">Xavier Morin Duchesne and </span><span class="author_string" final_id="HVEI-249" onclick="toggle_me()" style="cursor: pointer;">Michael Langer</span><span class="author_string" final_id="HVEI-249" onclick="toggle_me()" style="cursor: pointer;">, McGill University (Canada)</span><span class="abstract_link" final_id="HVEI-249" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-249" id="abstract-HVEI-249" onclick="toggle_me()" style="display:none; cursor:pointer;">Both natural scene statistics and ground surfaces have been shown to play important roles in visual perception, in particular, in the perception of distance. Yet, there have been surprisingly few studies looking at the natural statistics of distances to the ground, and the studies that have been done used a loose definition of ground. Additionally, perception studies investigating the role of the ground surface typically use artificial scenes containing perfectly flat ground surfaces with relatively few non-ground objects present, whereas ground surfaces in natural scenes are typically non-planar and have a large number of non-ground objects occluding the ground. Our study investigates the distance statistics of a large number of natural scenes across three datasets, with the goal of separately analyzing the ground surface and non-ground objects. We used a recent filtering method to partition LiDAR-acquired 3D point clouds into ground points and non-ground points. We then examined the way in which distance distributions depend on distance, viewing elevation angle, and simulated viewing height. We found, first, that the distance distribution of ground points shares some similarities with that of a perfectly flat plane, namely with a sharp peak at a near distance that depends on viewing height, but also some differences. Second, we also found that the distribution of non-ground points is flatter and did not vary with viewing height. Third, we found that the proportion of non-ground points increases with viewing elevation angle. Our findings provide further insight into the statistical information available for distance perception in natural scenes, and suggest that studies of distance perception should consider a broader range of ground surfaces and object distributions than what has been used in the past in order to better reflect the statistics of natural scenes.</p> <p> </p> <br> <br> <p class="session_title">DISCUSSION: Tuesday End of Day (T4)</p> <span class="chair_label">Session Chair: </span> <span class="chair">Damon Chandler, Ritsumeikan University (Japan)<br> </span> <span class="session_time">4:50 – 5:30 PM</span> <br> <span class="room">Cyril Magnin I </span> <br> <span></span> <p class="session_notes">Please join us for a lively discussion of today's presentations. 
Participate in an interactive, moderated discussion, where key topics and questions are discussed from many perspectives, reflecting the diverse HVEI community.</p> <br> <br> <br> <p class="event_time">5:30 – 7:00 PM EI 2023 Symposium Demonstration Session (in the Cyril Magnin Foyer)</p> <p class="date">Wednesday 18 January 2023</p> <div class="callout"> <p class="session_title">KEYNOTE: AR/VR Special Session 1 (W1)<img alt="" class="flag_image" src="http://www.imaging.org/images/IST_Images/Conferences/EI/Joint-Session.png" style="vertical-align: middle; margin-left:1em;"></p> <span class="chair">Session Chair: Alexandre Chapiro, Meta (United States)<br> </span><span class="session_time">9:05 – 10:10 AM</span> <br> <span class="room">Cyril Magnin II </span> <br> <span></span> <p class="session_notes">This session is jointly sponsored by: Engineering Reality of Virtual Reality 2023, Human Vision and Electronic Imaging 2023, and Stereoscopic Displays and Applications XXXIV. </p> <br> <p class="presentation_time" style="text-align:left;"> <br> <span class="presentation_title">Joint Conference Welcome</span> </p> <p class="presentation_time" style="text-align:left;"> <a name="HVEI-219"></a><a name="HVEI-219"></a><span style="float: right;">HVEI-219</span> <br> <span class="presentation_title" final_id="HVEI-219" onclick="toggle_me()" style="cursor: pointer;">KEYNOTE: Display consideration for AR/VR systems, </span><span class="author_string" final_id="HVEI-219" onclick="toggle_me()" style="cursor: pointer;">Ajit Ninan</span><span class="author_string" final_id="HVEI-219" onclick="toggle_me()" style="cursor: pointer;">, Reality Labs at Meta (United States)</span><span class="abstract_link" final_id="HVEI-219" onclick="toggle_me()"> [view abstract] </span></p> <p class="session_notes"> </p> <p class="session_notes">Ajit Ninan is a display industry veteran and led the way to the industry adopting HDR. His inventions and innovations are manifest in millions of shipped HDR TVs and consumer electronics from multiple companies. He holds 400+ granted patents in imaging and display technology and now works in imaging related to AR/VR at Meta as Senior Director of Applied Perceptual Science and Image Quality. His work spans multiple subjects, ranging from displays, imaging, color, video, and compression to audio and networking. His career spans early start-ups to public companies. Ninan is the inventor of the locally dimmed quantum dot TV and led the way to the industry adoption of quantum dot displays by working with Vizio, Nanosys, and 3M to release the first-of-its-kind R-series QD TV with HDR. He also led the effort with the JPEG committee to standardize JPEG-XT to enable JPEG HDR images. Ninan was inducted as a SMPTE Fellow for his contributions to imaging and standards. The display that caused the world to adopt HDR, the “Pulsar”, built by Ninan and his team in 2010 and capable of 4,000 nits down to 0.005 nits with P3 color, received many awards including the Advanced Imaging Society’s Lumiere Award, enabled the development of Dolby Vision, and earned Ninan an Emmy.</p> <p class="abstract" final_id="HVEI-219" id="abstract-HVEI-219" onclick="toggle_me()" style="display:none; cursor:pointer;">AR and VR displays must take into consideration human perception and image quality factors that are required for a product. At Meta, we study these perceptual factors and determine what quality targets and requirements are needed. 
This talk will discuss some of these aspects and highlight examples of our process that help us set direction. The presenter, Ajit Ninan, is the director of Engineering, Display and Optics, at Meta.</p> <p> </p> </div> <br> <p class="event_time">10:00 AM – 3:30 PM Industry Exhibition - Wednesday (in the Cyril Magnin Foyer)</p> <p class="event_time">10:20 – 10:50 AM Coffee Break</p> <br> <br> <p class="session_title">AR/VR Special Session 2 (W2)<img alt="" class="flag_image" src="http://www.imaging.org/images/IST_Images/Conferences/EI/Joint-Session.png" style="vertical-align: middle; margin-left:1em;"></p> <span class="chair_label">Session Chairs: </span> <span class="chair">Nicko Caluya, Ritsumeikan University (Japan) and Alexandre Chapiro, Meta (United States)<br> </span> <span class="session_time">10:50 AM – 12:30 PM</span> <br> <span class="room">Cyril Magnin II </span> <br> <span></span> <p class="session_notes">This session is jointly sponsored by: Engineering Reality of Virtual Reality 2023, Human Vision and Electronic Imaging 2023, and Stereoscopic Displays and Applications XXXIV.</p> <br> <p class="presentation_time" style="text-align:left;">10:50<a name="HVEI-220"></a><a name="HVEI-220"></a><span style="float: right;">HVEI-220</span> <br> <span class="presentation_title" final_id="HVEI-220" onclick="toggle_me()" style="cursor: pointer;">Comparison of AR and VR memory palace quality in second-language vocabulary acquisition (Invited), </span><span class="author_string" final_id="HVEI-220" onclick="toggle_me()" style="cursor: pointer;">Xiaoyang Tian, </span><span class="author_string" final_id="HVEI-220" onclick="toggle_me()" style="cursor: pointer;">Nicko Caluya, and </span><span class="author_string" final_id="HVEI-220" onclick="toggle_me()" style="cursor: pointer;">Damon M. Chandler</span><span class="author_string" final_id="HVEI-220" onclick="toggle_me()" style="cursor: pointer;">, Ritsumeikan University (Japan)</span><span class="abstract_link" final_id="HVEI-220" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-220" id="abstract-HVEI-220" onclick="toggle_me()" style="display:none; cursor:pointer;">The method of loci (memory palace technique) is a learning strategy that uses visualizations of spatial environments to enhance memory. One particularly popular use of the method of loci is for language learning, in which the method can help long-term memory of vocabulary by allowing users to associate location and other spatial information with particular words/concepts, thus making use of spatial memory to assist memory typically associated with language. Augmented reality (AR) and virtual reality (VR) have been shown to potentially provide even better memory enhancement due to their superior visualization abilities. However, a direct comparison of the two techniques in terms of language-learning enhancement has not yet been investigated. 
In this presentation, we present the results of a study designed to compare AR and VR when using the method of loci for learning vocabulary from a second language.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:10<a name="HVEI-221"></a><a name="HVEI-221"></a><span style="float: right;">HVEI-221</span> <br> <span class="presentation_title" final_id="HVEI-221" onclick="toggle_me()" style="cursor: pointer;">Projection mapping for enhancing the perceived deliciousness of food (Invited), </span><span class="author_string" final_id="HVEI-221" onclick="toggle_me()" style="cursor: pointer;">Yuichiro Fujimoto</span><span class="author_string" final_id="HVEI-221" onclick="toggle_me()" style="cursor: pointer;">, Nara Institute of Science and Technology (Japan)</span><span class="abstract_link" final_id="HVEI-221" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-221" id="abstract-HVEI-221" onclick="toggle_me()" style="display:none; cursor:pointer;">The perceived deliciousness of a food item is highly related to its appearance. Image processing has been widely used to make food images more appealing to the public, such as when capturing and posting images on social networking sites. In this research, I propose a system to enhance the degree of subjective deliciousness of food visually perceived by a person by automatically changing its appearance with a spatial augmented reality (SAR) technique in a real environment. The relationship between the degree of subjective deliciousness and four appearance features for each food category is modeled using data gathered via a crowdsourcing-based questionnaire. Using this model, the system generates the appropriate projection image to increase the deliciousness of the food. Experiments verify that the system can actually change and improve the impression of the target food’s deliciousness.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:30<a name="HVEI-222"></a><a name="HVEI-222"></a><span style="float: right;">HVEI-222</span> <br> <span class="presentation_title" final_id="HVEI-222" onclick="toggle_me()" style="cursor: pointer;">Real-time imaging processing for low-vision users, </span><span class="author_string" final_id="HVEI-222" onclick="toggle_me()" style="cursor: pointer;">Yang Cai</span><span class="author_string" final_id="HVEI-222" onclick="toggle_me()" style="cursor: pointer;">, CMU (United States)</span><span class="abstract_link" final_id="HVEI-222" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-222" id="abstract-HVEI-222" onclick="toggle_me()" style="display:none; cursor:pointer;">We have developed an assistive technology for people with vision disabilities of central field loss (CFL) and low contrast sensitivity (LCS). Our technology includes a pair of holographic AR glasses with enhanced image magnification and contrast, for example, highlighting objects and detecting signs and words. In contrast to prevailing AR technologies, which project either mixed-reality objects or virtual objects onto the glasses, our solution fuses real-time sensory information and enhances images from reality. The AR glasses technology has two advantages. First, it is relatively “fail-safe”: if the battery dies or the processor crashes, the glasses can still function because they are transparent. Second, the AR glasses can be transformed into a VR or AR simulator by overlaying virtual objects such as pedestrians or vehicles onto the glasses for simulation. 
The real-time visual enhancement and alert information are overlaid on the transparent glasses. The visual enhancement modules include zooming, Fourier filters, contrast enhancement, and contour overlay. Our preliminary tests with low-vision patients show that the AR glass indeed improved patients' vision and mobility, for example, from 20/80 to 20/25 or 20/30.</p> <p> </p> <p class="presentation_time" style="text-align:left;">11:50<a name="HVEI-223"></a><a name="HVEI-223"></a><span style="float: right;">HVEI-223</span> <br> <span class="presentation_title" final_id="HVEI-223" onclick="toggle_me()" style="cursor: pointer;">Critical flicker frequency (CFF) at high luminance levels, </span><span class="author_string" final_id="HVEI-223" onclick="toggle_me()" style="cursor: pointer;">Alexandre Chapiro<sup>1</sup>, </span><span class="author_string" final_id="HVEI-223" onclick="toggle_me()" style="cursor: pointer;">Nathan Matsuda<sup>1</sup>, </span><span class="author_string" final_id="HVEI-223" onclick="toggle_me()" style="cursor: pointer;">Maliha Ashraf<sup>2</sup>, and </span><span class="author_string" final_id="HVEI-223" onclick="toggle_me()" style="cursor: pointer;">Rafal Mantiuk<sup>3</sup></span><span class="author_string" final_id="HVEI-223" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>Meta (United States), <sup>2</sup>University of Liverpool (United Kingdom), and <sup>3</sup>University of Cambridge (United Kingdom)</span><span class="abstract_link" final_id="HVEI-223" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-223" id="abstract-HVEI-223" onclick="toggle_me()" style="display:none; cursor:pointer;">The critical flicker fusion (CFF) is the frequency of changes at which a temporally periodic light will begin to appear completely steady to an observer. This value is affected by several visual factors, such as the luminance of the stimulus or its location on the retina. With new high dynamic range (HDR) displays, operating at higher luminance levels, and virtual reality (VR) displays, presenting at wide fields-of-view, the effective CFF may change significantly from values expected for traditional presentation. In this work we use a prototype HDR VR display capable of luminances up to 20,000 cd/m^2 to gather a novel set of CFF measurements for never before examined levels of luminance, eccentricity, and size. 
Our data are useful for studying the temporal behavior of the visual system at high luminance levels, as well as for setting useful thresholds for display engineering.</p> <p> </p> <p class="presentation_time" style="text-align:left;">12:10<a name="HVEI-253"></a><a name="HVEI-253"></a><span style="float: right;">HVEI-253</span> <br> <span class="presentation_title" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">A multichannel LED-based lighting approach to improve color discrimination for low vision people, </span><span class="author_string" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">Linna Yang<sup>1</sup>, </span><span class="author_string" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">Éric Dinet<sup>1</sup>, </span><span class="author_string" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">Pichayada Katemake<sup>2</sup>, </span><span class="author_string" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">Alain Trémeau<sup>1</sup>, and </span><span class="author_string" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">Philippe Colantoni<sup>1</sup></span><span class="author_string" final_id="HVEI-253" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>University Jean Monnet Saint-Etienne (France) and <sup>2</sup>Chulalongkorn University (Thailand)</span><span class="abstract_link" final_id="HVEI-253" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-253" id="abstract-HVEI-253" onclick="toggle_me()" style="display:none; cursor:pointer;">The population of low vision people increases continuously as society ages. As reported by the WHO, most of this population is over the age of 50 and 81% had not been affected by any visual problem before. A visual deficiency can dramatically affect the quality of life and challenge the preservation of a safe independent existence. This study presents an LED-based lighting approach to assist people facing an age-related visual impairment. The research procedure is based on psychophysical experiments consisting of the ordering of standard color samples. Volunteers wearing low vision simulation goggles performed such an ordering under different illumination conditions produced by a 24-channel multispectral lighting system. A filtering technique using color rendering indices coupled with color measurements allowed us to objectively determine the lighting conditions providing the best scores in terms of color discrimination. Experimental results were used to combine 3 channels to produce white light inducing stronger color perception in a low vision context than the white LEDs currently available for general lighting. Even though further studies will be required, these first results give hope for the design of smart lighting devices that adapt to the visual needs of the visually impaired.</p> <p> </p> <br> <br> <p class="event_time">12:30 – 2:00 PM Lunch</p> <div class="pinkcallout"> <p class="session_title">Wednesday 18 January PLENARY: Bringing Vision Science to Electronic Imaging: The Pyramid of Visibility</p> <span class="chair">Session Chair: Andreas Savakis, Rochester Institute of Technology (United States)<br> </span> <span class="session_time">2:00 PM – 3:00 PM</span> <br> <span class="room">Cyril Magnin I/II/III<br> </span> <span></span> <p class="session_notes">Electronic imaging depends fundamentally on the capabilities and limitations of human vision. 
The challenge for the vision scientist is to describe these limitations to the engineer in a comprehensive, computable, and elegant formulation. Primary among these limitations are visibility of variations in light intensity over space and time, of variations in color over space and time, and of all of these patterns with position in the visual field. Lastly, we must describe how all these sensitivities vary with adapting light level. We have recently developed a structural description of human visual sensitivity that we call the Pyramid of Visibility, that accomplishes this synthesis. This talk shows how this structure accommodates all the dimensions described above, and how it can be used to solve a wide variety of problems in display engineering.</p> <br> <span></span> <p class="session_notes"> </p> <span class="author_string"><strong>Andrew B. Watson, </strong>chief vision scientist, Apple Inc. (United States)<span class="author_string"></span></span> <p> </p> <span></span> <p class="session_notes">Andrew Watson is Chief Vision Scientist at Apple, where he leads the application of vision science to technologies, applications, and displays. His research focuses on computational models of early vision. He is the author of more than 100 scientific papers and 8 patents. He has 21,180 citations and an h-index of 63. Watson founded the Journal of Vision, and served as editor-in-chief 2001-2013 and 2018-2022. Watson has received numerous awards including the Presidential Rank Award from the President of the United States.</p> </div> <br> <p class="event_time">3:00 – 3:30 PM Coffee Break</p> <div class="callout"> <p class="session_title">PANEL: AR/VR Special Session (W3.1)<img alt="" class="flag_image" src="http://www.imaging.org/images/IST_Images/Conferences/EI/Joint-Session.png" style="vertical-align: middle; margin-left:1em;"></p> <span class="chair">Session Chairs: Nicko Caluya, Ritsumeikan University (Japan) and Alexandre Chapiro, Meta (United States)<br> </span><span class="chair">Panelists: Alexandre Chapiro, Meta (United States); Yuichiro Fujimoto, Nara Institute of Science and Technology (Japan); Nicolas Holliman, King's College London (United Kingdom); and Ajit Ninan, Reality Labs at Meta (United States)<br> </span><span class="session_time">3:30 – 4:50 PM</span> <br> <span class="room">Cyril Magnin II </span> <br> <span></span> <p class="session_notes">This session is jointly sponsored by: Engineering Reality of Virtual Reality 2023, Human Vision and Electronic Imaging 2023, and Stereoscopic Displays and Applications XXXIV.</p> </div> <br> <br> <br> <p class="session_title">DISCUSSION: Wednesday End of Joint Sessions (W3.2)<img alt="" class="flag_image" src="http://www.imaging.org/images/IST_Images/Conferences/EI/Joint-Session.png" style="vertical-align: middle; margin-left:1em;"></p> <span class="chair_label">Session Chair: </span> <span class="chair">Damon Chandler, Ritsumeikan University (Japan)<br> </span> <span class="session_time">4:50 – 5:30 PM</span> <br> <span class="room">Cyril Magnin II </span> <br> <span></span> <p class="session_notes">This session is jointly sponsored by: Engineering Reality of Virtual Reality 2023, Human Vision and Electronic Imaging 2023, and Stereoscopic Displays and Applications XXXIV.</p> <p class="session_notes">Please join us for a lively discussion of today's presentations. 
Participate in an interactive, moderated discussion, where key topics and questions are discussed from many perspectives, reflecting the diverse HVEI community.</p> <br> <br> <p class="event_time">5:30 – 7:00 PM EI 2023 Symposium Interactive (Poster) Paper Session (in the Cyril Magnin Foyer)</p> <p class="event_time">5:30 – 7:00 PM EI 2023 Meet the Future: A Showcase of Student and Young Professionals Research (in the Cyril Magnin Foyer)</p> <div class="callout"> <p class="session_title">BANQUET: 2023 Friends of HVEI (W5)</p> <span class="chair">Session Chairs: Damon Chandler, Ritsumeikan University (Japan) and Rafal Mantiuk, University of Cambridge (United Kingdom)<br> </span><span class="session_time">7:00 – 10:00 PM</span> <br> <span class="room">MISSION I/II/III </span> <br> <span></span> <p class="session_notes">Join us for a wonderful evening of conversations, a banquet dinner, and an enlightening speaker. This banquet is associated with the Human Vision and Electronic Imaging Conference (HVEI), but everyone interested in research at the intersection of human perception/cognition, imaging technologies, and art is welcome. Banquet registration required, online or at the registration desk. Location will be provided with registration. </p> <br> <p class="presentation_time" style="text-align:left;"> <a name="HVEI-250"></a><span style="float: right;">HVEI-250</span> <br> <span class="presentation_title" final_id="HVEI-250" onclick="toggle_me()" style="cursor: pointer;">KEYNOTE: How to let your pictures shine! The impact of high dynamic range imaging on photography, </span><span class="author_string" final_id="HVEI-250" onclick="toggle_me()" style="cursor: pointer;">Timo Kunkel</span><span class="author_string" final_id="HVEI-250" onclick="toggle_me()" style="cursor: pointer;">, Dolby Laboratories, Inc. (United States)</span><span class="abstract_link" final_id="HVEI-250" onclick="toggle_me()"> [view abstract] </span></p> <p class="session_notes">Dr. Timo Kunkel is director of image technology & standards in the CTO office of Dolby Laboratories, Inc. His fields of expertise include image processing, color science, high dynamic range imaging, color appearance modeling, and advanced display technologies. Kunkel is engaged in developing color management models for both professional and consumer displays (dynamic range and gamut mapping concepts). This involves active research, code development and QA as well as applying metrological and psychophysical concepts for verification, icluding picture quality assessment and tuning for several display technologies from customers all over the world. Additionally, he has experience in neuroscience and psychological concepts related to the Human Visual System (signal processing in the retina and higher visual cortex), and has been involved in developing the core concepts of what is now Dolby Vision. Kunkel is also actively involved with international standards work, serving as technical expert and member of IEC TC100 (Audio, video and multimedia systems and equipment) and TC110 (Electronic displays), the International Color Consortium (ICC), as well as the SID International Committee of Display Metrology (ICDM). Further, Kunkel has a background in Physical Geosciences (remote sensing and geospatial image processing, GIS, Vegetation- and Ecosystem Modeling) and has worked in these fields with research departments at Lund University in Sweden, Lincoln University in New Zealand, and the University of Dar es Salaam in Tanzania. 
This work is supported by more than 20 years of experience as a freelance landscape and architecture photographer for clients in Europe and the US, winning several prizes with images combining HDR and computational photography aspects. Kunkel served as president of the Bristol Chapter of ACM SIGGRAPH, 2006 - 2008, and was co-founder of the Bruder & Bär publishing company (Germany), serving there as Art Director, 2003 - 2006. Kunkel holds a PhD in computer science from the University of Bristol, United Kingdom, and an MSc from the University of Freiburg, Germany.</p> <p class="abstract" final_id="HVEI-250" id="abstract-HVEI-250" onclick="toggle_me()" style="display:none; cursor:pointer;">High-dynamic range imaging, better known by its acronym “HDR”, has established itself as a foundational component when looking at the aspects defining today’s image fidelity. Together with the availability of wide color gamut (WCG) approaches, HDR has influenced and shaped both the technical tools and the creative means of photography. This talk will touch on the intersection of HDR technologies and the artistic expression it enables, from scene lighting and composition via camera capture and processing, to print and display.</p> <p> </p> </div> <br> <p class="date">Thursday 19 January 2023</p> <br> <br> <p class="session_title">Creative Intent and Perception in Visualization and Displays (R1)</p> <span class="chair_label">Session Chair: </span> <span class="chair">Damon Chandler, Ritsumeikan University (Japan)<br> </span> <span class="session_time">9:30 – 10:10 AM</span> <br> <span class="room">Mission I/II<br> </span> <p class="presentation_time" style="text-align:left;">9:30<a name="HVEI-251"></a><span style="float: right;">HVEI-251</span> <br> <span class="presentation_title" final_id="HVEI-251" onclick="toggle_me()" style="cursor: pointer;">Am I safe? An examination of how everyday people interpret covid data visualizations, </span><span class="author_string" final_id="HVEI-251" onclick="toggle_me()" style="cursor: pointer;">Bernice Rogowitz<sup>1</sup> and </span><span class="author_string" final_id="HVEI-251" onclick="toggle_me()" style="cursor: pointer;">Paul Borrel<sup>2</sup></span><span class="author_string" final_id="HVEI-251" onclick="toggle_me()" style="cursor: pointer;">; <sup>1</sup>Visual Perspectives (United States) and <sup>2</sup>consultant (France)</span><span class="abstract_link" final_id="HVEI-251" onclick="toggle_me()"> [view abstract] </span></p> <p class="abstract" final_id="HVEI-251" id="abstract-HVEI-251" onclick="toggle_me()" style="display:none; cursor:pointer;">During these past years, international COVID data have been collected by several reputable organizations and made available to the worldwide community. This has resulted in a wellspring of different visualizations. Many different measures can be selected (e.g., cases, deaths, hospitalizations). And for each measure, designers and policy makers can make a myriad of different choices of how to represent the data. Data from individual countries may be presented on linear or log scales, daily, weekly, or cumulative, alone or in the context of other countries, scaled to a common grid, or scaled to their own range, raw or per capita, etc. It is well known that the data representation can influence the interpretation of data. But what visual features in these different representations affect our judgments? 
Thursday 19 January 2023

Creative Intent and Perception in Visualization and Displays (R1)
Session Chair: Damon Chandler, Ritsumeikan University (Japan)
9:30 – 10:10 AM
Mission I/II

9:30  HVEI-251
Am I safe? An examination of how everyday people interpret COVID data visualizations, Bernice Rogowitz¹ and Paul Borrel²; ¹Visual Perspectives (United States) and ²consultant (France)

Abstract: Over the past years, international COVID data have been collected by several reputable organizations and made available to the worldwide community. This has resulted in a wellspring of different visualizations. Many different measures can be selected (e.g., cases, deaths, hospitalizations), and for each measure, designers and policy makers can make a myriad of choices about how to represent the data. Data from individual countries may be presented on linear or log scales; daily, weekly, or cumulative; alone or in the context of other countries; scaled to a common grid or to their own range; raw or per capita; and so on. It is well known that the representation can influence the interpretation of data, but which visual features of these different representations affect our judgments? To explore this question, we conducted an experiment in which participants looked at time-series data plots and assessed how safe they would feel traveling to one of the countries represented, and how confident they were in that judgment. Observers rated 48 visualizations of the same data, rendered differently along six controlled dimensions. Our initial results provide insight into how characteristics of the visual representation affect human judgments of time-series data. We also discuss how these results could inform how public-policy and news organizations choose to represent data to the public.
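All of the representational choices enumerated in the abstract (linear vs. log scale, daily vs. cumulative counts, raw vs. per capita) can be generated from a single underlying time series. The sketch below is a minimal illustration of that point and is not the authors' stimulus-generation code; the two countries, their case counts, and their populations are synthetic assumptions.

import numpy as np
import matplotlib.pyplot as plt

# Synthetic daily case counts for two hypothetical countries (stand-in data)
days = np.arange(120)
rng = np.random.default_rng(1)
daily = {
    "Country A": rng.poisson(2000 * np.exp(-((days - 60) / 25.0) ** 2)),
    "Country B": rng.poisson(400 * np.exp(-((days - 80) / 15.0) ** 2)),
}
population = {"Country A": 60_000_000, "Country B": 5_000_000}  # assumed populations

# One data set, four of the representations discussed in the abstract
fig, axes = plt.subplots(2, 2, figsize=(9, 6), sharex=True)
for name, y in daily.items():
    axes[0, 0].plot(days, y, label=name)                           # daily, linear, raw
    axes[0, 1].semilogy(days, np.maximum(y, 1), label=name)        # daily, log scale
    axes[1, 0].plot(days, np.cumsum(y), label=name)                # cumulative
    axes[1, 1].plot(days, 1e5 * y / population[name], label=name)  # daily per 100,000
titles = ["Daily (linear)", "Daily (log)", "Cumulative", "Daily per 100,000"]
for ax, title in zip(axes.ravel(), titles):
    ax.set_title(title)
    ax.legend()
fig.tight_layout()
fig.savefig("covid_representations.png")  # same data, four quite different impressions

Even in this toy example, the log-scale and per-capita panels can reverse which country appears "safer", which is exactly the kind of representational effect the study probes.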
9:50  HVEI-254
Biosensors for landing creative intent, Scott Daly, Evan Gitterman, Dan Darcy, and Shane Ruggieri, Dolby Laboratories, Inc. (United States)

Abstract: The motivation for using biosensors in audiovisual media is established by highlighting the problem of signal loss due to the wide variability of playback devices. A metadata system is described that allows creatives to steer signal modifications as a function of audience emotion and cognition, as determined by biosensor analysis.

10:20 – 10:50 AM Coffee Break

EEG/fMRI/Retina (R2)
Session Chair: Bernice Rogowitz, Visual Perspectives (United States)
10:50 – 11:50 AM
Mission I/II

10:50  HVEI-255
Self-regulation of attentional stance facilitates induction of meditative states, Glenn Hartelius¹,², Lora T. Likova³, and Christopher W. Tyler³; ¹Alef Trust, ²Naropa University, and ³The Smith-Kettlewell Eye Research Institute (United States)

Abstract: This study focuses on the novel concept of the origin or seat of the attentional spotlight, the bodily location at which attended information is felt to impinge. Existing research on the seat of attention, also described as self-location or egocenter, shows that it can be situated in various ways within the experienced body space (Hanley et al., 2020), and that differences in its location have a measurable impact on cognitive skill, emotional temperament, and self-construal, as well as on social and moral attitudes (Adam et al., 2015; Fetterman et al., 2020; Fetterman & Robinson, 2013). A recent study by Hartelius et al. (2022) showed that this aspect of attention can be volitionally self-regulated into various internal attentional stances, and that these stances are relatively stable, as demonstrated by robust within-subject inter-run correlations of EEG-measured patterns of brain activation for each stance; trials with eight participants showed that most stances were associated with a unique cortical activation pattern in one or more frequency bands. The study also demonstrated that some attentional stances (that is, locations of the seat of attention) can be objectively associated with specific positive emotional states, suggesting that control of attentional stance should provide direct management of specific cognitive and emotional resources. This suggestion is supported by an earlier study with endurance athletes showing that a discrete attentional stance was associated with each of two tasks: a) reading a news story, and b) experiencing a flow state during athletic endurance practice (Hartelius, 2015; Marolt-Sender, 2014).
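The stability analysis referred to in the abstract, within-subject correlation of EEG activation patterns across runs for each attentional stance, can be sketched in a few lines. This is a minimal illustration under assumed data shapes (band power per channel and frequency band, per run and per stance), not the authors' analysis pipeline.

import numpy as np

rng = np.random.default_rng(2)
n_channels, n_bands, n_runs, n_stances = 32, 5, 2, 4   # assumed dimensions

# Assumed band-power data: one (channels x bands) pattern per run and stance.
# A stance-specific template plus run-to-run noise stands in for real EEG features.
templates = rng.normal(size=(n_stances, n_channels, n_bands))
power = templates[None] + 0.3 * rng.normal(size=(n_runs, n_stances, n_channels, n_bands))

def inter_run_correlation(p):
    """Pearson correlation between the run-1 and run-2 patterns for each stance."""
    out = []
    for s in range(p.shape[1]):
        a = p[0, s].ravel()
        b = p[1, s].ravel()
        out.append(np.corrcoef(a, b)[0, 1])
    return np.array(out)

for s, r in enumerate(inter_run_correlation(power)):
    print(f"stance {s}: inter-run r = {r:.2f}")   # high r -> stable, reproducible pattern

High inter-run correlations of this kind are what would indicate that a volitionally adopted stance produces a reproducible cortical activation pattern.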
11:10  HVEI-256
Spatial cognition training rapidly induces cortical plasticity in blind navigation, Lora T. Likova, Zhangziyi Zhou, Michael Liang, and Christopher W. Tyler, The Smith-Kettlewell Eye Research Institute (United States)

Abstract: Successful navigation requires spatial cognition abilities, primarily the development of an accurate and flexible mental, or cognitive, map of the navigational space and of the route trajectory required to travel to the target location. To train the spatial cognition abilities and spatial memory underlying successful navigation, we translated the power of the Likova Cognitive-Kinesthetic Rehabilitation Training, initially developed for the manual domain of operation, to the domain of navigation. In tasks requiring mentally performed navigational decision planning (planning the shortest or the reversed shortest path between newly specified locations on a just-memorized tactile map) and memory-guided motor execution of these decisions (accurately drawing the respective planned paths), the most significant increase in brain activation was found in two lateral anterior cortical regions (DLPFC, insula), in contrast to very little change in the medial posterior regions (occipital V1-V4, retrosplenial cortex/precuneus) for most of these tasks. By extending our previous findings from the manual to the navigation domain, these results demonstrate the power of a multidisciplinary approach incorporating art, behavioral, and neuroscience methodologies to drive much-needed plasticity in the adult brain.

12:30 – 2:00 PM Lunch

SFMOMA Museum Tour & Casual Dinner (R3)
2:00 – 8:00 PM
OFFSITE - Meet at Registration

Join your HVEI colleagues for an excursion to SFMOMA after Thursday's lunch recess. Meet and depart from the EI 2023 registration desk at 2:00 pm. Visit SFMOMA 2:30 – 5:00 pm. Gather informally for dinner at 6:00 pm.
class="ContentItemContainer"><div class="FooterTop"><div class="FooterLogo"><img src="/images/75th%20logo%20alt%20blue%20white%20bkgrnd.png" alt=""> </div> <div class="FooterSocial"> <div class="FooterSocialText"> <p>Stay Connected!</p> </div> <div class="FooterSocialImg"><a href="https://www.linkedin.com/company/society-for-imaging-science-and-technology-is&t-"><img src="/images/Icons/linkedin36blue.png" alt="" style="margin-right: 10px;"></a><a href="https://twitter.com/ImagingOrg"><img src="/images/Icons/twitter36blue.png" alt="" style=""></a></div> </div></div></div><div id="ste_container_NewContentHtml1" class="ContentItemContainer"><div class="FooterBottom"><p style="text-align: center;">© Copyright 2023 Society for Imaging Sciences and Technology. All Rights Reserved.</p></div></div><div class="ContentRecordPageButtonPanel"> </div> </div> </div> </div> </div> <div class="footer-nav-copyright"> <div class="container" role="navigation"> <div class="footer-copyright" data-label= "Footer 2"> <div ID="WTZone10_Page1" class="WTZone iPartsDisplayInlineBlock"> </div> </div> </div> </div> </footer> </div> <!--Jscript from Page.ResgisterStartupScript extention is loaded here --> <Div><script type="text/javascript">Sys.Application.add_load(function () {{ MasterPageBase_Init(); }});</script> <script type="text/javascript">Sys.Application.add_load(function() { { BreadCrumb_load('80409b89-ae6d-45a9-a9d4-96d522ff2047'); } }); </script> </Div><input name="ctl01$TemplateScripts$timeoutsoonmsg" type="hidden" id="timeoutsoonmsg" value="PGgyPllvdSBhcmUgYWJvdXQgdG8gYmUgc2lnbmVkIG91dDwvaDI+DQo8cD5Zb3Ugd2lsbCBiZSBzaWduZWQgb3V0IGluIDxzdHJvbmc+W1NlY29uZHNSZW1haW5pbmddPC9zdHJvbmc+IHNlY29uZHMgZHVlIHRvIGluYWN0aXZpdHkuIFlvdXIgY2hhbmdlcyB3aWxsIG5vdCBiZSBzYXZlZC4gVG8gY29udGludWUgd29ya2luZyBvbiB0aGUgd2Vic2l0ZSwgY2xpY2sgIlN0YXkgU2lnbmVkIEluIiBiZWxvdy48L3A+" /><input name="ctl01$TemplateScripts$timeoutsoonstaysignintxt" type="hidden" id="timeoutsoonstaysignintxt" value="U3RheSBTaWduZWQgSW4=" /><input name="ctl01$TemplateScripts$timeoutsoonlogouttxt" type="hidden" id="timeoutsoonlogouttxt" value="U2lnbiBPdXQ=" /><input name="ctl01$TemplateScripts$stayLoggedInURL" type="hidden" id="stayLoggedInURL" /><input name="ctl01$TemplateScripts$logoutUrl" type="hidden" id="logoutUrl" value="aHR0cHM6Ly93d3cuaW1hZ2luZy5vcmcvYXNpY29tbW9uL2NvbnRyb2xzL3NoYXJlZC9mb3Jtc2F1dGhlbnRpY2F0aW9uL2xvZ2luLmFzcHg/U2Vzc2lvblRpbWVvdXQ9MSZSZXR1cm5Vcmw9JTJmSVNUJTJmSVNUJTJmQ29uZmVyZW5jZXMlMmZFSSUyZkVJMjAyMyUyZkNvbmZlcmVuY2UlMmZDX0hWRUkuYXNweCUzZg==" /> <!-- Bootstrap Modal --> <div id="BootstrapModal" class="modal fade" tabindex="-1" role="dialog" aria-label="Modal" aria-hidden="true"> <div id="BootstrapDocument" class="modal-dialog modal-xl" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">×</span> </button> </div> <div class="modal-body p-0 m-0"> <iframe id="ContentFrame" class="modal-content-iframe" width="100%" height="100px" frameborder="0"></iframe> </div> </div> </div> </div> <div id="ctl01_RadAjaxManager1SU"> <span id="ctl01_RadAjaxManager1" style="display:none;"></span> </div><div id="ctl01_WindowManager1" style="display:none;"> <div id="ctl01_GenericWindow" style="display:none;"> <div id="ctl01_GenericWindow_C" style="display:none;"> </div><input id="ctl01_GenericWindow_ClientState" name="ctl01_GenericWindow_ClientState" type="hidden" /> </div><div id="ctl01_ObjectBrowser" style="display:none;"> <div 
id="ctl01_ObjectBrowser_C" style="display:none;"> </div><input id="ctl01_ObjectBrowser_ClientState" name="ctl01_ObjectBrowser_ClientState" type="hidden" /> </div><div id="ctl01_ObjectBrowserDialog" style="display:none;"> <div id="ctl01_ObjectBrowserDialog_C" style="display:none;"> </div><input id="ctl01_ObjectBrowserDialog_ClientState" name="ctl01_ObjectBrowserDialog_ClientState" type="hidden" /> </div><div id="ctl01_WindowManager1_alerttemplate" style="display:none;"> <div class="rwDialogPopup radalert"> <div class="rwDialogText"> {1} </div> <div> <a onclick="$find('{0}').close(true);" class="rwPopupButton" href="javascript:void(0);"> <span class="rwOuterSpan"> <span class="rwInnerSpan">##LOC[OK]##</span> </span> </a> </div> </div> </div><div id="ctl01_WindowManager1_prompttemplate" style="display:none;"> <div class="rwDialogPopup radprompt"> <div class="rwDialogText"> {1} </div> <div> <script type="text/javascript"> function RadWindowprompt_detectenter(id, ev, input) { if (!ev) ev = window.event; if (ev.keyCode == 13) { var but = input.parentNode.parentNode.getElementsByTagName("A")[0]; if (but) { if (but.click) but.click(); else if (but.onclick) { but.focus(); var click = but.onclick; but.onclick = null; if (click) click.call(but); } } return false; } else return true; } </script> <input title="Enter Value" onkeydown="return RadWindowprompt_detectenter('{0}', event, this);" type="text" class="rwDialogInput" value="{2}" /> </div> <div> <a onclick="$find('{0}').close(this.parentNode.parentNode.getElementsByTagName('input')[0].value);" class="rwPopupButton" href="javascript:void(0);" ><span class="rwOuterSpan"><span class="rwInnerSpan">##LOC[OK]##</span></span></a> <a onclick="$find('{0}').close(null);" class="rwPopupButton" href="javascript:void(0);"><span class="rwOuterSpan"><span class="rwInnerSpan">##LOC[Cancel]##</span></span></a> </div> </div> </div><div id="ctl01_WindowManager1_confirmtemplate" style="display:none;"> <div class="rwDialogPopup radconfirm"> <div class="rwDialogText"> {1} </div> <div> <a onclick="$find('{0}').close(true);" class="rwPopupButton" href="javascript:void(0);" ><span class="rwOuterSpan"><span class="rwInnerSpan">##LOC[OK]##</span></span></a> <a onclick="$find('{0}').close(false);" class="rwPopupButton" href="javascript:void(0);"><span class="rwOuterSpan"><span class="rwInnerSpan">##LOC[Cancel]##</span></span></a> </div> </div> </div><input id="ctl01_WindowManager1_ClientState" name="ctl01_WindowManager1_ClientState" type="hidden" /> </div> <script type="text/javascript"> //<![CDATA[ var gCartCount; var cartDiv = $get("CartItemCount"); if (cartDiv != null){ jQuery.ajax({ type: "POST", url: gWebRoot + "/WebMethodUtilities.aspx/GetCartItemCount", data: "{}", contentType: "application/json; charset=utf-8", dataType: 'json', success: function(result) { if (result.d != '' && result.d != null) { gCartCount = result.d; if (gCartCount != null) { cartDiv.innerHTML = gCartCount; } } }, async: true }); } function CheckForChildren() { var contentRecordPageButtonPanelHasChildren = false; var contentRecordPageButtonPanel = jQuery('div.ContentRecordPageButtonPanel'); for (var i = 0, max = contentRecordPageButtonPanel.length; i < max; i++) { if (contentRecordPageButtonPanel[i].children.length > 0) { contentRecordPageButtonPanelHasChildren = true; break; } } if (!contentRecordPageButtonPanelHasChildren) { jQuery("Body").addClass("TemplateAreaEasyEditOn"); } } if (gIsEasyEditEnabled) CheckForChildren(); //]]> </script> <div class="aspNetHidden"> <input type="hidden" 
name="__VIEWSTATEGENERATOR" id="__VIEWSTATEGENERATOR" value="06E94F83" /> </div> <script type="text/javascript"> //<![CDATA[ if(typeof(window['ctl01_TemplateBody_ContentPage1_contentitemdisplaybasejsmanager'])==='undefined') { window['ctl01_TemplateBody_ContentPage1_contentitemdisplaybasejsmanager']=new Asi_WebRoot_AsiCommon_ContentManagement_DownloadDocument(); }if(typeof(window['ctl01_TemplateBody_ContentPage1_contentitemdisplaybasejsmanager'])!=='undefined') { window['ctl01_TemplateBody_ContentPage1_contentitemdisplaybasejsmanager'].OnLoad('#ctl01_TemplateBody_ContentPage1_downloadButton','#ctl01_TemplateBody_ContentPage1_HiddenDownloadPathField'); }if(typeof(window['ctl01_TemplateBody_ContentPage2_contentitemdisplaybasejsmanager'])==='undefined') { window['ctl01_TemplateBody_ContentPage2_contentitemdisplaybasejsmanager']=new Asi_WebRoot_AsiCommon_ContentManagement_DownloadDocument(); }if(typeof(window['ctl01_TemplateBody_ContentPage2_contentitemdisplaybasejsmanager'])!=='undefined') { window['ctl01_TemplateBody_ContentPage2_contentitemdisplaybasejsmanager'].OnLoad('#ctl01_TemplateBody_ContentPage2_downloadButton','#ctl01_TemplateBody_ContentPage2_HiddenDownloadPathField'); }if(typeof(window['ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_contentitemdisplaybasejsmanager'])==='undefined') { window['ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_contentitemdisplaybasejsmanager']=new Asi_WebRoot_AsiCommon_ContentManagement_DownloadDocument(); }if(typeof(window['ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_contentitemdisplaybasejsmanager'])!=='undefined') { window['ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_contentitemdisplaybasejsmanager'].OnLoad('#ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_downloadButton','#ctl01_TemplateBody_WebPartManager1_gwpciCornerArt_ciCornerArt_HiddenDownloadPathField'); }__Document_Head_Init('https://www.imaging.org/NoCookies.html', '', false);window.__TsmHiddenField = $get('ctl01_ScriptManager1_TSM');NavigationList_NavControlId = '_rptWrapper';NavigationList_Init();var ctl01_ciNewUtilityNavigationCommon2_ctl05_SearchTermsProperties = new SimpleSearchFieldProperties(); ctl01_ciNewUtilityNavigationCommon2_ctl05_SearchTermsProperties.WatermarkClass = 'Watermarked'; ctl01_ciNewUtilityNavigationCommon2_ctl05_SearchTermsProperties.WatermarkText = 'Keyword search'; ctl01_ciNewUtilityNavigationCommon2_ctl05_SearchTermsProperties.SearchTarget = 'https://www.imaging.org/Search'; var ctl01_ciNewUtilityNavigationCommon2_ctl08_SearchTermsProperties = new SimpleSearchFieldProperties(); ctl01_ciNewUtilityNavigationCommon2_ctl08_SearchTermsProperties.WatermarkClass = 'Watermarked'; ctl01_ciNewUtilityNavigationCommon2_ctl08_SearchTermsProperties.WatermarkText = 'Keyword search'; ctl01_ciNewUtilityNavigationCommon2_ctl08_SearchTermsProperties.SearchTarget = 'https://www.imaging.org/Search'; NavigationList_NavControlId = 'ctl01_ciPrimaryNavigation_NavControl_NavMenu';NavigationList_Init();PageNavR_NavMenuClientID = 'ctl01_ciPrimaryNavigation_NavControl_NavMenu';var __wpmExportWarning='This Web Part Page has been personalized. As a result, one or more Web Part properties may contain confidential information. Make sure the properties contain information that is safe for others to read. After exporting this Web Part, view properties in the Web Part description file (.WebPart) by using a text editor such as Microsoft Notepad.';var __wpmCloseProviderWarning='You are about to close this Web Part. 
It is currently providing data to other Web Parts, and these connections will be deleted if this Web Part is closed. To close this Web Part, click OK. To keep this Web Part, click Cancel.';var __wpmDeleteWarning='You are about to permanently delete this Web Part. Are you sure you want to do this? To delete this Web Part, click OK. To keep this Web Part, click Cancel.';__wpm = new WebPartManager(); __wpm.overlayContainerElement = document.getElementById('ctl01_TemplateBody_WebPartManager1___Drag'); __wpm.personalizationScopeShared = false; var zoneElement; var zoneObject; zoneElement = document.getElementById('ctl01_TemplateBody_ContentPage1_WebPartZone1_Page1');if (zoneElement != null) {zoneObject = __wpm.AddZone(zoneElement, 'ctl01$TemplateBody$ContentPage1$WebPartZone1_Page1', true, false, 'Blue'); zoneObject.AddWebPart(document.getElementById('WebPart_gwpciCornerArt'), document.getElementById('WebPartTitle_gwpciCornerArt'), false); zoneObject.AddWebPart(document.getElementById('WebPart_gwpciSponsors_0155da0917194f109ff1dbb88bccc53e'), document.getElementById('WebPartTitle_gwpciSponsors_0155da0917194f109ff1dbb88bccc53e'), false); }zoneElement = document.getElementById('ctl01_TemplateBody_ContentPage2_WebPartZone2_Page1');if (zoneElement != null) {zoneObject = __wpm.AddZone(zoneElement, 'ctl01$TemplateBody$ContentPage2$WebPartZone2_Page1', true, false, 'Blue'); zoneObject.AddWebPart(document.getElementById('WebPart_gwpciConfCCO'), document.getElementById('WebPartTitle_gwpciConfCCO'), false); }if(typeof(window['ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_jsmanager'])=='undefined') { window['ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_jsmanager']=new Asi_Web_iParts_ContentCollectionOrganizer_ContentCollectionOrganizerDisplay('ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage', 'False'); }Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadMenu, {"_childListElementCssClass":"rmRootGroup rmToggleHandles rmHorizontal","_skin":"NaturalHeritageSites","attributes":{"Translate":"Yes","PerspectiveId":"80409b89-ae6d-45a9-a9d4-96d522ff2047","NavigationArea":"1","MaxDataBindDepth":"3"},"autoScrollMinimumWidth":100,"clientStateFieldID":"ctl01_ciPrimaryNavigation_NavControl_NavMenu_ClientState","collapseAnimation":"{\"duration\":450}","defaultGroupSettings":"{\"flow\":0,\"expandDirection\":2,\"offsetX\":0}","expandAnimation":"{\"duration\":450}","itemData":[],"showToggleHandle":true}, {"itemClicking":PageNavR_OnClientItemClicking,"itemClosed":PageNavR_OnItemClosed,"itemOpened":PageNavR_OnItemOpened,"load":PageNavR_OnClientLoadHandler}, null, $get("ctl01_ciPrimaryNavigation_NavControl_NavMenu")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadTabStrip, 
{"_autoPostBack":true,"_postBackOnClick":true,"_postBackReference":"__doPostBack(\u0027ctl01$TemplateBody$WebPartManager1$gwpciConfCCO$ciConfCCO$radTab_Top\u0027,\u0027arguments\u0027)","_scrollButtonsPosition":1,"_selectedIndex":1,"_skin":"MetroTouch","causesValidation":false,"clientStateFieldID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top_ClientState","enableAriaSupport":true,"multiPageID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage","selectedIndexes":["1"],"tabData":[{"value":"1","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_1","attributes":{"translate":"yes"}},{"value":"2","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_2","attributes":{"translate":"yes"}},{"value":"3","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_3","attributes":{"translate":"yes"}},{"value":"4","_implPageViewID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_4","attributes":{"translate":"yes"}}]}, null, null, $get("ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radTab_Top")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadMultiPage, {"clientStateFieldID":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage_ClientState","pageViewData":[{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_1"},{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_2"},{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_3"},{"id":"ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_Page_4"}],"selectedIndex":1}, null, null, $get("ctl01_TemplateBody_WebPartManager1_gwpciConfCCO_ciConfCCO_radPage")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadAjaxManager, {"_updatePanels":"","ajaxSettings":[],"clientEvents":{OnRequestStart:"",OnResponseEnd:""},"defaultLoadingPanelID":"AjaxStatusLoadingPanel","enableAJAX":true,"enableHistory":false,"links":[],"styles":[],"uniqueID":"ctl01$RadAjaxManager1","updatePanelsRenderMode":0}, null, null, $get("ctl01_RadAjaxManager1")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindow, {"_dockMode":false,"behaviors":117,"clientStateFieldID":"ctl01_GenericWindow_ClientState","enableAriaSupport":true,"formID":"aspnetForm","height":"550px","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","modal":true,"name":"GenericWindow","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","showContentDuringLoad":false,"skin":"MetroTouch","visibleStatusbar":false,"width":"800px"}, null, null, $get("ctl01_GenericWindow")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindow, 
{"_dockMode":false,"behaviors":117,"clientStateFieldID":"ctl01_ObjectBrowser_ClientState","enableAriaSupport":true,"formID":"aspnetForm","height":"550px","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","modal":true,"name":"ObjectBrowser","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","showContentDuringLoad":false,"skin":"MetroTouch","visibleStatusbar":false,"width":"760px"}, null, null, $get("ctl01_ObjectBrowser")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindow, {"_dockMode":false,"behaviors":117,"clientStateFieldID":"ctl01_ObjectBrowserDialog_ClientState","enableAriaSupport":true,"formID":"aspnetForm","height":"400px","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","modal":true,"name":"ObjectBrowserDialog","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","showContentDuringLoad":false,"skin":"MetroTouch","visibleStatusbar":false,"width":"600px"}, null, null, $get("ctl01_ObjectBrowserDialog")); }); Sys.Application.add_init(function() { $create(Telerik.Web.UI.RadWindowManager, {"behaviors":117,"clientStateFieldID":"ctl01_WindowManager1_ClientState","enableAriaSupport":true,"formID":"aspnetForm","iconUrl":"","localization":"{\"Close\":\"Close\",\"Maximize\":\"Maximize\",\"Minimize\":\"Minimize\",\"Reload\":\"Reload\",\"PinOn\":\"PinOn\",\"PinOff\":\"PinOff\",\"Restore\":\"Restore\",\"OK\":\"OK\",\"Cancel\":\"Cancel\",\"Yes\":\"Yes\",\"No\":\"No\"}","minimizeIconUrl":"","name":"WindowManager1","shortcuts":"[[\u0027close\u0027,\u0027Esc\u0027]]","skin":"MetroTouch","windowControls":"['ctl01_GenericWindow','ctl01_ObjectBrowser','ctl01_ObjectBrowserDialog']"}, null, {"child":"ctl01_GenericWindow"}, $get("ctl01_WindowManager1")); }); //]]> </script> </form> <div id="fb-root"></div> </body> </html>