David Melcher | University of Trento - Academia.edu

David Melcher
University of Trento, Center for Mind/Brain Sciences, Faculty Member
New York University Abu Dhabi, Psychology, Faculty Member

Researcher and teacher in Cognitive Neuroscience
Address: Trento, Trentino-Alto Adige, Italy

Followers: 49 | Following: 17 | Co-authors: 14

Related Authors
Daniel D. Hutto (University of Wollongong)
Galen Strawson (The University of Texas at Austin)
Steven Pinker (Harvard University)
Michael Spivey (University of California, Merced)
Javier Díaz Noci (Pompeu Fabra University)
Shaun Gallagher (University of Memphis)
John Johnson (Pennsylvania State University)
Armando Marques-Guedes (UNL - New University of Lisbon)
Carlos A. Scolari (Pompeu Fabra University)
Fabio Cuzzolin (Oxford Brookes University)

Interests
Cognitive Neuroscience, Philosophy, Psychology, Communication, Artificial Intelligence
CV: Melcher_CV_2020_April.pdf

Uploads
Research highlights (13) | Recent publications (0) | Temporal aspects of perception (10) | Visual stability and remapping (13) | Individuation, estimation and numerical cognition (11) | Memory: scenes, events (10) | Attention and visual awareness (9) | Art & Neuroscience (4) | Other Papers (3) | Papers (86)

Research highlights by David Melcher

Frequency modulation of neural oscillations according to visual task demands
PNAS
Temporal integration in visual perception is thought to occur within cycles of occipital alpha-band (8–12 Hz) oscillations. Successive stimuli may be integrated when they fall within the same alpha cycle and segregated for different alpha cycles. Consequently, the speed of alpha oscillations correlates with the temporal resolution of perception, such that lower alpha frequencies provide longer time windows for perceptual integration and higher alpha frequencies correspond to faster sampling and segregation. Can the brain's rhythmic activity be dynamically controlled to adjust its processing speed according to different visual task demands? We recorded magnetoencephalography (MEG) while participants switched between task instructions for temporal integration and segregation, holding stimuli and task difficulty constant. We found that the peak frequency of alpha oscillations decreased when visual task demands required temporal integration compared with segregation. Alpha frequency was strategically modulated immediately before and during stimulus processing, suggesting a preparatory top-down source of modulation. Its neural generators were located in occipital and inferotemporal cortex. The frequency modulation was specific to alpha oscillations and did not occur in the delta (1–3 Hz), theta (3–7 Hz), beta (15–30 Hz), or gamma (30–50 Hz) frequency range. These results show that alpha frequency is under top-down control to increase or decrease the temporal resolution of visual perception.

Keywords: visual perception | temporal integration | alpha oscillations | oscillation frequency | top-down control

Visual perception is tasked both with constructing stable representations over time and with maximizing sensitivity to transient changes. A large body of work has shown that neural oscillations in the alpha band (8–12 Hz) are partially responsible for determining the temporal resolution of perception, such that when discrete events occur within the same oscillatory cycle they can become perceptually integrated (1–8). For instance, individuals with higher peak alpha frequencies have perception with higher temporal resolution (2, 9), indicating that lower peak frequencies correspond to integration over longer time windows. Furthermore, trial-to-trial variability in spontaneous alpha frequency predicts accuracy in a temporal discrimination paradigm (9) and the rate of illusory flicker (10). These findings raise the question of whether variability in peak alpha frequency is stochastic, varying randomly across persons and fluctuating over time within the same person, or might instead be strategically modulated based on task demands. It is known that the temporal resolution of visual perception can be modulated by attention (11–13), and oscillatory phase and power can be controlled by top-down factors (14–18). Here, we investigated whether the peak frequency of alpha oscillations is subject to attentional control, increasing or decreasing so as to effectively lengthen or shorten the temporal window of perceptual integration. To examine this question, we recorded magnetoencephalography (MEG) data while observers performed two different tasks, one requiring visual integration over time and the other involving visual segregation over time. Both tasks are variants of a classical perceptual integration paradigm called the missing-element task (MET) (19). In the MET, an array of elements is presented in two successive frames, separated by a short interstimulus interval (ISI). When superimposed, the elements in both frames occupy all but one of the positions in the array, and the observer's job is to identify the position with the missing element (Fig. 1A, blue). As ISI decreases, the two frames become perceptually integrated and identifying the missing element is simple. We introduced another condition that we term the odd-element task (OET) (20). In the OET, one-half of one of the elements is presented in the first frame and the other half is presented in the second (Fig. 1A, red). Here, the observer's task is to decide which location contained this odd element, which becomes easier as ISI increases and the frames are perceptually segregated. Critically, the use of these two tasks allowed us to determine whether modulations of peak alpha frequency were specifically tied to temporal resolution, as opposed to a more general fluctuation in attention or in visual sensitivity or criterion (21, 22). We hypothesized that if frequency modulation of alpha supports top-down control over temporal integration, then we should observe higher frequencies during the OET (when segregation is beneficial) compared with the MET (when integration is beneficial).

Results: By mapping psychometric functions relating performance on both tasks to ISI (sigmoid fit R² = 0.94 for OET, R² = 0.93 for MET), we could identify intersection points for each observer [...]

Significance: Neural oscillations are hypothesized to play an important role in modulating perceptual processing in accordance with top-down goals. For instance, the amplitude, phase, and spatial distribution of alpha-band oscillations change with attention. Given recent links between the peak frequency of alpha oscillations and the temporal resolution of perception, we investigated whether frequency modulation occurs when task demands emphasize integration or segregation of visual input over time. We found that alpha frequency in occipital–temporal cortex decreased during, and in anticipation of, stimulus processing when task demands required temporal integration compared with segregation. These results demonstrate a unique top-down mechanism by which the brain controls the temporal resolution of visual processing in accordance with current task demands.
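The Results excerpt turns on two concrete analysis steps: fitting a sigmoid psychometric function to accuracy as a function of ISI for each task, and locating the ISI at which the two fitted curves intersect for each observer. A minimal sketch of that style of analysis, with made-up data; the logistic parameterization, sample values, and all names below are illustrative assumptions, not the authors' code:

```python
import numpy as np
from scipy.optimize import brentq, curve_fit

def sigmoid(isi, x0, k, lo, hi):
    """Logistic psychometric function: accuracy vs. interstimulus interval (ms)."""
    return lo + (hi - lo) / (1.0 + np.exp(-k * (isi - x0)))

def r_squared(y, yhat):
    """Coefficient of determination of a fitted curve."""
    return 1.0 - np.sum((y - yhat) ** 2) / np.sum((y - np.mean(y)) ** 2)

# Hypothetical accuracies for one observer at seven ISIs (ms) -- illustrative only.
isi     = np.array([0.0, 20.0, 40.0, 60.0, 80.0, 100.0, 120.0])
acc_oet = np.array([0.05, 0.10, 0.30, 0.60, 0.80, 0.90, 0.95])  # segregation: easier at long ISIs
acc_met = np.array([0.95, 0.90, 0.75, 0.55, 0.35, 0.15, 0.05])  # integration: easier at short ISIs

p_oet, _ = curve_fit(sigmoid, isi, acc_oet, p0=[60.0, 0.1, 0.0, 1.0])
p_met, _ = curve_fit(sigmoid, isi, acc_met, p0=[60.0, -0.1, 0.0, 1.0])
print("R^2 OET:", round(r_squared(acc_oet, sigmoid(isi, *p_oet)), 3))
print("R^2 MET:", round(r_squared(acc_met, sigmoid(isi, *p_met)), 3))

# Intersection point: the ISI at which the two fitted curves predict equal accuracy.
crossing = brentq(lambda x: sigmoid(x, *p_oet) - sigmoid(x, *p_met), 0.0, 120.0)
print(f"OET and MET curves intersect at ISI ~ {crossing:.1f} ms")
```

Under this reading, the crossing is the ISI at which segregation and integration are matched in difficulty, which makes it a natural per-observer anchor for comparing the two tasks with identical stimuli.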
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57596697,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57596697/thumbnails/1.jpg","file_name":"Wutz_Melcher_Samaha_PNAS_2018.pdf","download_url":"https://www.academia.edu/attachments/57596697/download_file","bulk_download_file_name":"Frequency_modulation_of_neural_oscillati.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57596697/Wutz_Melcher_Samaha_PNAS_2018-libre.pdf?1539987457=\u0026response-content-disposition=attachment%3B+filename%3DFrequency_modulation_of_neural_oscillati.pdf\u0026Expires=1738812505\u0026Signature=R0nSSrLXtiIGoBxor270XrmqKYx-lqn4Ct4-z7V~42FDAZID4E3oD4Ne4f6wLxO-aL63EoEHyKUCNrq5fX-pcJToueEx2PGbjlr5xX4Z2UNjjjyexvIEOq6V8i6tD3SJHQFLHXUa1a38i6KJvaXu48kt1x9-UhupGCk-ruspUQnA6xKIUrbrkzreWDD7LuK-hViHbF646RMpB3pEorwkHIFSnRrIwlsIthLORfC6MjYzGqy1NPLTNf9s5ut2Tnan8wyxY3DDpzPgnryhUQTycVfwWIHhwQqTGalZrFwn6RgcU39nYaoJvqfLuX5h~G5y3naOrn0pST1eXr4Ka~Bkvg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="37613827"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/37613827/Temporal_Integration_Windows_in_Neural_Processing_and_Perception_Aligned_to_Saccadic_Eye_Movements"><img alt="Research paper thumbnail of Temporal Integration Windows in Neural Processing and Perception Aligned to Saccadic Eye Movements" class="work-thumbnail" src="https://attachments.academia-assets.com/57596698/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/37613827/Temporal_Integration_Windows_in_Neural_Processing_and_Perception_Aligned_to_Saccadic_Eye_Movements">Temporal Integration Windows in Neural Processing and Perception Aligned to Saccadic Eye Movements</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/EvelynM25">Evelyn M</a></span></div><div class="wp-workCard_item"><span>Current Biology</span><span>, 2016</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="4fe69bfb6a977d3be3715ee5a52b6a4a" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:57596698,&quot;asset_id&quot;:37613827,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/57596698/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="37613827"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa 
fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="37613827"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 37613827; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=37613827]").text(description); $(".js-view-count[data-work-id=37613827]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 37613827; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='37613827']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "4fe69bfb6a977d3be3715ee5a52b6a4a" } } $('.js-work-strip[data-work-id=37613827]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":37613827,"title":"Temporal Integration Windows in Neural Processing and Perception Aligned to Saccadic Eye Movements","internal_url":"https://www.academia.edu/37613827/Temporal_Integration_Windows_in_Neural_Processing_and_Perception_Aligned_to_Saccadic_Eye_Movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57596698,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57596698/thumbnails/1.jpg","file_name":"Wutz_Melcher_CurrBiol_2016.pdf","download_url":"https://www.academia.edu/attachments/57596698/download_file","bulk_download_file_name":"Temporal_Integration_Windows_in_Neural_P.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57596698/Wutz_Melcher_CurrBiol_2016-libre.pdf?1539987459=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_Integration_Windows_in_Neural_P.pdf\u0026Expires=1740881335\u0026Signature=BlQbaDYTFqIJv6fdNLQy4jROhrVt6Ko01W2EcCFZH4hbaQ5I2G7k3YCdbaH3uCZl1LHcPt1uDMHYR0zqE1yOV9xzDMEJzh3HiuPu3SOk6uWlQQHEeVCv2DvDrPpleYNW4eP6SwOeWGXrdiLO4fvg74D6RXkL7xFtKNLbpyu7q~SleyiqlOKkKcOam3rbdOoyqdga8W~0Gp309UdEZrArJW6y7zX5N5yRV2Qoh04wx~MKKKUwXdkq4bIy3~uQg9ewkb~uJiKA535zlS~vrJTONJbUe~h8ARCFHCWUx7GKabhWemmb8wSitRVg20Vs9ZDWtCIB5UQ3~x30~cT0npJT6A__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); 
Alpha-band sensory entrainment alters the duration of temporal windows in visual perception
David Melcher and Luca Ronconi. Scientific Reports, 2018.

The phase and frequency of neural oscillations in the alpha band (8–12 Hz) have recently been proposed as key parameters for the temporal resolution of visual perception. Here, we tested the possible causal links between these oscillatory features and temporal integration/segregation. The individual alpha frequency (IAF) peak obtained from resting-state electroencephalography was used to set the frequency of sensory (audiovisual) entrainment for the lower (IAF − 2 Hz) and upper (IAF + 2 Hz) alpha band. Entrainment at IAF ± 2 Hz was administered in the prestimulus interval to align oscillations to a faster or slower rhythm. We densely sampled in time the accuracy of integration/segregation using identical stimuli with different instructions. The spectral peaks of performance fluctuations over time were found in the upper or lower alpha band for the IAF + 2 Hz and IAF − 2 Hz entrainment, respectively, implying that faster entrainment resulted in faster behavioral fluctuations. Moreover, the entrainment frequency had opposite effects on temporal resolution: faster entrainment improved segregation, while slower entrainment improved integration. Performance fluctuations were almost in anti-phase between the two tasks, such that the highest integration performance coincided with the lowest segregation performance. These findings provide evidence for a direct link between changes in the alpha band and the temporal resolution of perception.

Despite our subjective impression of a continuous and smooth reality, the continuous flow of information coming from the sensory world is not elaborated in an analog fashion. On the contrary, the brain samples information periodically, discretizing sensory inputs according to its hardwired rhythms within and across the different sensory modalities [1,2]. On the one hand, the capacity to combine information over time can be advantageous for accurate and precise percepts of, and actions on, objects that tend to remain stable over time. On the other hand, temporal integration might reduce sensitivity to rapid changes in incoming sensory input due to a dynamic environment or our own actions. For example, temporal integration of motion signals might lead to misinterpreting a 180-degree change in direction as a reduction in speed. Likewise, long temporal integration windows would reduce the effectiveness of tactile feedback during active touch. Thus, sensory processing relies on a balance between temporal integration (to improve perceptual interpretations, at low temporal resolution) and segregation (sensitivity to change, at high temporal resolution). The brain may balance these two needs by alternating, in a rhythmic way, between elaboration of input over time (integration) and sensitivity to new input (segregation). The idea that perceptual processing depends on rhythmic sampling of sensory information was initially introduced in seminal neurophysiological studies [3–5] and later confirmed using electroencephalographic (EEG) recordings in humans, which consistently show a relationship between oscillatory phase and sensitivity to new input [6–9]. Together, these findings argue that fluctuations in detection, reflecting sensitivity to new input, are related to alpha rhythms. The idea of a rhythmic process also finds support in nonhuman primate studies showing that spikes in sensory areas are more likely to occur at a specific phase of local field potential oscillations than at the opposite phase [10]. Building on this idea, other studies have investigated the role of neural oscillations in the temporal resolution of visual perception, defined as whether two items in a sequence are perceived as separate, individual events or are instead combined into a single percept [11–19] (for reviews see [20,21]). The key idea is that if two stimuli fall into the same oscillatory cycle they are temporally bound into a single percept, while two stimuli falling into separate cycles are parsed into two distinct temporal events.
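The entrainment procedure hinges on first estimating each observer's individual alpha frequency (IAF) from resting-state EEG and then driving audiovisual stimulation at IAF ± 2 Hz. Below is a minimal sketch of that step, assuming a single-channel resting-state trace `eeg` sampled at `fs` Hz; the Welch-based peak picking and all variable names are illustrative assumptions, not the authors' pipeline.

    import numpy as np
    from scipy.signal import welch

    def individual_alpha_frequency(eeg, fs, band=(8.0, 12.0)):
        """Estimate the IAF as the power-spectral peak within the alpha band."""
        freqs, psd = welch(eeg, fs=fs, nperseg=int(4 * fs))  # ~0.25 Hz resolution
        mask = (freqs >= band[0]) & (freqs <= band[1])
        return freqs[mask][np.argmax(psd[mask])]

    fs = 500.0                            # hypothetical sampling rate (Hz)
    t = np.arange(0, 60, 1 / fs)          # 60 s of synthetic "resting" data
    eeg = np.sin(2 * np.pi * 10.2 * t) + 0.5 * np.random.randn(t.size)

    iaf = individual_alpha_frequency(eeg, fs)
    slow, fast = iaf - 2.0, iaf + 2.0     # lower/upper alpha entrainment rates
    print(f"IAF = {iaf:.2f} Hz -> entrain at {slow:.2f} Hz and {fast:.2f} Hz")

The same spectral-peak logic can be reused on the behavioral time series: a peak of the accuracy-over-time spectrum near `fast` (vs. `slow`) would mirror the paper's finding that faster entrainment yields faster performance fluctuations.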
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57487443,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57487443/thumbnails/1.jpg","file_name":"Ronconi_Busch_Melcher_Sci_Rep_2018.pdf","download_url":"https://www.academia.edu/attachments/57487443/download_file","bulk_download_file_name":"Alpha_band_sensory_entrainment_alters_th.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57487443/Ronconi_Busch_Melcher_Sci_Rep_2018-libre.pdf?1538482736=\u0026response-content-disposition=attachment%3B+filename%3DAlpha_band_sensory_entrainment_alters_th.pdf\u0026Expires=1738812505\u0026Signature=JtWcADVyZGmaCx20moTiZma2ThuLCmOKSF6KAmT63NsiijZvVysUl3QkZFIY09VO4yplQ6ijDWgvUO6ldGuPSt0F2oRkPho7S31WB4RMLGUdXQY96xRElyckBxg97b318AHptBDEcbyijkzaCB9MHRHz6HtFaguf8Svg0bUEY5LkTg7Mt5WgBTpMw45rO5FKG1zghge6QtibOvUaTyqmjM6pVzK5f0LjKxejv-TVv6qTGKY9OCKgtVeQQsX5VqSdl~NfdPrDh7Nt-qKHWIJefT1PQ-Y-R5-N91wmM2MApylK0lpnqQbeeX7dARTHHT12LdwvAoUNibjuLLCzUpvK7w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="37514748"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/37514748/Multiple_oscillatory_rhythms_determine_the_temporal_organization_of_perception"><img alt="Research paper thumbnail of Multiple oscillatory rhythms determine the temporal organization of perception" class="work-thumbnail" src="https://attachments.academia-assets.com/57487428/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/37514748/Multiple_oscillatory_rhythms_determine_the_temporal_organization_of_perception">Multiple oscillatory rhythms determine the temporal organization of perception</a></div><div class="wp-workCard_item"><span>PNAS</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Incoming Q:12 sensory input is condensed by our perceptual system to represent and store informat...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Incoming Q:12 sensory input is condensed by our perceptual system to represent and store information in an optimal way. In the temporal domain, this process has been described in terms of temporal windows (TWs) of integration/segregation, in which the phase of ongoing neural oscillations determines whether two stimuli are integrated into a single percept or segregated into separate events. However, the durations of TWs can vary substantially, raising the question of whether different TWs map onto unique oscillations or, rather, reflect a single, general fluctuation in cortical excitability (e.g., in the alpha band). We used multivariate decoding of electroencephalography (EEG) data to investigate perception of stimuli that either repeated in the same location (two-flash fusion) or moved in space (apparent motion). 
By manipulating the interstimulus interval (ISI), we created bistable stimuli that caused subjects to perceive either integration (fusion/apparent motion) or segregation (two unrelated flashes). Training a classifier searchlight on the whole time/frequency/channel space, we found that the perceptual outcome (integration vs. segregation) could be reliably decoded from the phase of prestimulus oscillations in right posterior-parietal channels. The highest decoding accuracy for the two-flash fusion task [interstimulus interval (ISI) = 40 ms] was evident in the phase of alpha oscillations (8-10 Hz), while the highest decoding accuracy for the apparent motion task (ISI = 120 ms) was evident in the phase of theta oscillations (6-7 Hz). These results reveal a precise relationship between specific TW durations (short or long) and specific neural oscillations. Such oscillations at different frequencies may provide a hierarchical framework for the temporal organization of perception.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="ae4785db8e6230e70dbf19d345933ab4" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:57487428,&quot;asset_id&quot;:37514748,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/57487428/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="37514748"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="37514748"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 37514748; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=37514748]").text(description); $(".js-view-count[data-work-id=37514748]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 37514748; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='37514748']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: 
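The core analytical move here is decoding a binary perceptual outcome from an angular variable (prestimulus phase). Because phase is circular, a common trick is to project it onto cosine and sine components and feed those to a linear classifier with cross-validation. The sketch below uses synthetic data and scikit-learn's LogisticRegression as a stand-in for the classifier searchlight, which in the paper swept the full time/frequency/channel space; the von Mises phase model is an illustrative assumption.

    import numpy as np
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import cross_val_score

    rng = np.random.default_rng(0)
    n_trials = 400

    # Synthetic prestimulus phases (radians): "integration" trials cluster
    # near 0, "segregation" trials near pi, mimicking a phase-opposition effect.
    labels = rng.integers(0, 2, n_trials)          # 0 = integration, 1 = segregation
    phases = rng.vonmises(np.where(labels == 0, 0.0, np.pi), 2.0)

    # Circular phase -> linear features via cosine/sine projection.
    X = np.column_stack([np.cos(phases), np.sin(phases)])

    acc = cross_val_score(LogisticRegression(), X, labels, cv=5).mean()
    print(f"cross-validated decoding accuracy: {acc:.2f}")  # well above 0.5 chance

In the actual analysis, accuracy above chance at a given time/frequency/channel point is what identifies where (e.g., right posterior-parietal alpha vs. theta) phase predicts the percept.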
Visual scene memory and the guidance of saccadic eye movements
Vision Research, 2001.

An unresolved question is how much information can be remembered from visual scenes when they are inspected by saccadic eye movements. Subjects used saccadic eye movements to scan a computer-generated scene and afterwards recalled as many objects as they could. Scene memory was quite good: it improved with display duration, it persisted long after the display was removed, and it continued to accumulate with additional viewings of the same display (Melcher, D. The persistence of memory for scenes. Nature 412, 401). The occurrence of saccadic eye movements was important for good recall performance, even though subjects often recalled non-fixated objects. Inter-saccadic intervals increased with display duration, showing an influence of duration on global scanning strategy. The choice of saccadic target was predicted by a Random Selection with Distance Weighting (RSDW) model, in which the target for each saccade is selected at random from all available objects, weighted according to distance from fixation, regardless of which objects had previously been fixated. The results show that the visual memory reflected in the recall reports was not utilized for the immediate decision about where to look in the scene. Visual memory can be excellent, but it is not always reflected in oculomotor measures, perhaps because the cost of rapid on-line memory retrieval is too great.
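The RSDW model is easy to state computationally: each saccade target is drawn at random from all objects, with probability decreasing with distance from the current fixation, and with no memory of what was already fixated. A minimal sketch follows; the exponential fall-off and its scale are illustrative assumptions (the paper specifies distance weighting, but this code does not reproduce its fitted form).

    import numpy as np

    rng = np.random.default_rng(1)

    def next_target_rsdw(objects, fixation, scale=5.0):
        """Random Selection with Distance Weighting: sample one object,
        weighting candidates by proximity to the current fixation.
        Deliberately keeps no record of previously fixated objects."""
        d = np.linalg.norm(objects - fixation, axis=1)   # distances (deg)
        w = np.exp(-d / scale)                           # assumed fall-off
        return objects[rng.choice(len(objects), p=w / w.sum())]

    objects = rng.uniform(0, 20, size=(12, 2))  # 12 objects in a 20-deg scene
    fix = np.array([10.0, 10.0])
    for _ in range(5):                          # simulate a short scanpath
        fix = next_target_rsdw(objects, fix)
        print(np.round(fix, 1))

Because the sampler is memoryless, it naturally revisits objects, which is exactly the signature that distinguishes RSDW from memory-guided scanning models.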
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "58541d2819de179724f5bc9ace7f4287" } } $('.js-work-strip[data-work-id=5309664]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":5309664,"title":"Visual scene memory and the guidance of saccadic eye movements","internal_url":"https://www.academia.edu/5309664/Visual_scene_memory_and_the_guidance_of_saccadic_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49352419,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49352419/thumbnails/1.jpg","file_name":"Visual_scene_memory_and_the_guidance_of_20161004-26595-l0wp6k.pdf","download_url":"https://www.academia.edu/attachments/49352419/download_file","bulk_download_file_name":"Visual_scene_memory_and_the_guidance_of.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49352419/Visual_scene_memory_and_the_guidance_of_20161004-26595-l0wp6k-libre.pdf?1475607798=\u0026response-content-disposition=attachment%3B+filename%3DVisual_scene_memory_and_the_guidance_of.pdf\u0026Expires=1740881335\u0026Signature=O3dIHXs5jn4VhrIdSGXDVEVoMnfqqKjBC~fej-~t-JDw2hzNXMn03ETVcnA8rxe4lULjdy0FtwwVrZmdZ731ZBkFbRaVoqet3vby5LfxWz7wYTCF42EaaVts2QMQPa-mh-szbe6SRs1pCSQ4m6w4x7bUKSWwsDU1p3YlkXTmWfVbhXE2rMHYGSSyMfmgaG6F9ps3uABui9617BF1jnKBoxcsRySBkn1-nBYxb-ttvjml2qC~Ohls~fRQCihQje1KZ-7ovppMwRX0n0Np2mJxpK-bneDRSWlre-fAgTKPglaf1FLu8xXvm-NyPHdgATgZ6wcbC-vlwEbJLu1q0OJ5MA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058778"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058778/Brains_of_verbal_memory_specialists_show_anatomical_differences_in_language_memory_and_visual_systems"><img alt="Research paper thumbnail of Brains of verbal memory specialists show anatomical differences in language, memory and visual systems" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058778/Brains_of_verbal_memory_specialists_show_anatomical_differences_in_language_memory_and_visual_systems">Brains of verbal memory specialists show anatomical differences in language, memory and visual systems</a></div><div class="wp-workCard_item"><span>NeuroImage</span><span>, 2015</span></div><div 
class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We studied a group of verbal memory specialists to determine whether intensive oral text memory i...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We studied a group of verbal memory specialists to determine whether intensive oral text memory is associated with structural features of hippocampal and lateral-temporal regions implicated in language processing. Professional Vedic Sanskrit Pandits in India train from childhood for around 10years in an ancient, formalized tradition of oral Sanskrit text memorization and recitation, mastering the exact pronunciation and invariant content of multiple 40,000-100,000 word oral texts. We conducted structural analysis of gray matter density, cortical thickness, local gyrification, and white matter structure, relative to matched controls. We found massive gray matter density and cortical thickness increases in Pandit brains in language, memory and visual systems, including i) bilateral lateral temporal cortices and ii) the anterior cingulate cortex and the hippocampus, regions associated with long and short-term memory. Differences in hippocampal morphometry matched those previously documented for expert spatial navigators and individuals with good verbal working memory. The findings provide unique insight into the brain organization implementing formalized oral knowledge systems.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058778"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058778"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058778; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058778]").text(description); $(".js-view-count[data-work-id=29058778]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058778; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058778']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058778]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058778,"title":"Brains of verbal memory specialists show anatomical differences in language, memory and visual systems","internal_url":"https://www.academia.edu/29058778/Brains_of_verbal_memory_specialists_show_anatomical_differences_in_language_memory_and_visual_systems","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="5309663"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309663/Predictive_remapping_of_visual_features_precedes_saccadic_eye_movements"><img alt="Research paper thumbnail of Predictive remapping of visual features precedes saccadic eye movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49352411/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309663/Predictive_remapping_of_visual_features_precedes_saccadic_eye_movements">Predictive remapping of visual features precedes saccadic eye movements</a></div><div class="wp-workCard_item"><span>Nature Neuroscience</span><span>, 2007</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The frequent occurrence of saccadic eye movements raises the question of how information is combi...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The frequent occurrence of saccadic eye movements raises the question of how information is combined across separate glances into a stable, continuous percept. Here I show that visual form processing is altered at both the current fixation position and the location of the saccadic target before the saccade. When human observers prepared to follow a displacement of the stimulus with the eyes, visual form adaptation was transferred from current fixation to the future gaze position. This transfer of adaptation also influenced the perception of test stimuli shown at an intermediate position between fixation and saccadic target. 
Additionally, I found a presaccadic transfer of adaptation when observers prepared to move their eyes toward a stationary adapting stimulus in peripheral vision. The remapping of visual processing, demonstrated here with form adaptation, may help to explain our impression of a smooth transition, with no temporal delay, of visual perception across glances.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="63f6eaafe442b1608e35e35314650380" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352411,&quot;asset_id&quot;:5309663,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352411/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309663"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309663"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309663; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309663]").text(description); $(".js-view-count[data-work-id=5309663]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309663; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309663']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "63f6eaafe442b1608e35e35314650380" } } $('.js-work-strip[data-work-id=5309663]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":5309663,"title":"Predictive remapping of visual features precedes saccadic eye 
movements","internal_url":"https://www.academia.edu/5309663/Predictive_remapping_of_visual_features_precedes_saccadic_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49352411,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49352411/thumbnails/1.jpg","file_name":"Predictive_remapping_of_visual_features_20161004-5636-vz31ej.pdf","download_url":"https://www.academia.edu/attachments/49352411/download_file","bulk_download_file_name":"Predictive_remapping_of_visual_features.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49352411/Predictive_remapping_of_visual_features_20161004-5636-vz31ej-libre.pdf?1475607808=\u0026response-content-disposition=attachment%3B+filename%3DPredictive_remapping_of_visual_features.pdf\u0026Expires=1740881335\u0026Signature=NvWWM3rEPT5vHgFr2SThSTvLaW80PsXJK55afRzbAXoqTEIj2oh5o9b05GMfzEOeUyX3j0CZI99Hwu4Fkwx4hEMbHL74FvDFu-hDN8XkV1v3V1YhMEJHLCQTTAiHpY9e0FjzeL42JW2ZObrg6Ioza9leCOX9Efb2grrez0T6dgrX4T4JyluqVUHmNH1w-ZTR7YSRhI2lqIetMvP59mfl5P6DI0GrX46P2-TYWtD2cOcF-U4RJj28j6tF77ldXQzgm3ObwCPA1stp2~mvm4OTH00vqkUnibZtWdfEQTV0uXXR5xmLfK42~xpJ3VWfxKbLTbF0Qjy3HVv2nCsRkZ6~zQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="5309666"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309666/Spatiotopic_temporal_integration_of_visual_motion_across_saccadic_eye_movements"><img alt="Research paper thumbnail of Spatiotopic temporal integration of visual motion across saccadic eye movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49352397/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309666/Spatiotopic_temporal_integration_of_visual_motion_across_saccadic_eye_movements">Spatiotopic temporal integration of visual motion across saccadic eye movements</a></div><div class="wp-workCard_item"><span>Nature Neuroscience</span><span>, 2003</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although seldom aware of this, humans make an average of 3-5 saccadic eye movements per second. A...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although seldom aware of this, humans make an average of 3-5 saccadic eye movements per second. Although this is an efficient strategy to make maximum use of the high resolution of central vision 1 , it raises the question of how information from separate glances is combined to give a stable perception of the world. 
One potential solution would be to combine visual information across saccades. However, this mechanism could be useful only if information from the same spatiotopic position is integrated. Previous studies of trans-saccadic integration show that visual information is not integrated across saccades, at least in the case of visual patterns 2-4 (but see ref. 5). Many simple visual tasks, such as contrast sensitivity and shape discrimination, have a limited integration time falling within the duration of a typical fixation 6 . Thus, the failure to find visual integration in these tasks does not necessarily indicate that spatiotopic integration does not occur; simple patterns might be detected in a single fixation.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="41c3f4ced3dc0b37d3a9e7104158825a" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352397,&quot;asset_id&quot;:5309666,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352397/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309666"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309666"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309666; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309666]").text(description); $(".js-view-count[data-work-id=5309666]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309666; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309666']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "41c3f4ced3dc0b37d3a9e7104158825a" } } $('.js-work-strip[data-work-id=5309666]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":5309666,"title":"Spatiotopic temporal integration of 
Subitizing reflects visuo-spatial object individuation capacity
David Melcher and Manuela Piazza. Cognition, 2011.

Subitizing is the immediate apprehension of the exact number of items in small sets. Despite more than 100 years of research on this phenomenon, its nature and origin are still unknown. One view posits that it reflects a number estimation process common to small and large sets, whose precision decreases as the number of items increases, in accordance with Weber's law. Another view proposes that it reflects a non-numerical mechanism of visual indexing of multiple objects in parallel that is limited in capacity. In previous research we gathered evidence against the Weberian estimation hypothesis. Here we provide the first direct evidence for the alternative object-indexing hypothesis, and show that subitizing reflects a domain-general mechanism shared with other tasks that require multiple object individuation.
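The two hypotheses make different quantitative predictions for small sets: a Weberian estimator's error grows in proportion to numerosity (a constant Weber fraction), whereas a capacity-limited indexing mechanism is essentially errorless up to roughly 3-4 items and degrades beyond. A toy simulation of those two response patterns is sketched below; the Weber fraction, the capacity value, and the Gaussian noise model are illustrative assumptions, not the paper's analysis.

    import numpy as np

    rng = np.random.default_rng(2)
    trials, capacity, weber = 10_000, 4, 0.15

    for n in range(1, 9):
        # Weberian estimation: noise scales with n (constant Weber fraction).
        est = np.rint(n + rng.normal(0, weber * n, trials))
        # Object indexing: exact within capacity, estimation-like beyond it.
        idx = (np.full(trials, n) if n <= capacity
               else np.rint(n + rng.normal(0, weber * n, trials)))
        print(f"n={n}: estimation acc={np.mean(est == n):.2f}, "
              f"indexing acc={np.mean(idx == n):.2f}")

Under these assumptions, the estimation model loses accuracy smoothly even within the subitizing range, while the indexing model stays at ceiling until its capacity is exceeded, which is the empirical signature the paper argues for.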
{"id":29058736,"title":"Subitizing reflects visuo-spatial object individuation capacity","internal_url":"https://www.academia.edu/29058736/Subitizing_reflects_visuo_spatial_object_individuation_capacity","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512560,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512560/thumbnails/1.jpg","file_name":"Subitizing_reflects_visuo-spatial_object20161010-18908-oszwv7.pdf","download_url":"https://www.academia.edu/attachments/49512560/download_file","bulk_download_file_name":"Subitizing_reflects_visuo_spatial_object.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512560/Subitizing_reflects_visuo-spatial_object20161010-18908-oszwv7-libre.pdf?1476136405=\u0026response-content-disposition=attachment%3B+filename%3DSubitizing_reflects_visuo_spatial_object.pdf\u0026Expires=1740881335\u0026Signature=GasNJxm0YjlMT-vXr7c8wOM3yfSkF0DCe~6YSYfCBHWmQbBnXKurAQ44dMeC-2elKwNPXLjqq00HV9w1T5V417pk1xs4Xc~PxSzEUDXJIJhXFRBpocrN4sQJOYbDNFNxO4XWDmgnIWnT6-zhWNzZMvqVPvTkaUirYBqcMlnHYoCJBgls5fZmuwr5FMrqLkx2-V9k72kJ6QSMuzS69k~k1lg3BJm8lKArHOL~23yHqGknFejhcMfl9F8ynWoVQ836g8SpdDBrmcZ7~px0seUbKgiEBwikqyjvW-IQ9JMhbYq2uHmJ4mcf~hpsZJEoAx6diqkfvgqWN3sk4mQ2BTYNOw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058738"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058738/Spatiotopic_Transfer_of_Visual_Form_Adaptation_across_Saccadic_Eye_Movements"><img alt="Research paper thumbnail of Spatiotopic Transfer of Visual-Form Adaptation across Saccadic Eye Movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49512563/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058738/Spatiotopic_Transfer_of_Visual_Form_Adaptation_across_Saccadic_Eye_Movements">Spatiotopic Transfer of Visual-Form Adaptation across Saccadic Eye Movements</a></div><div class="wp-workCard_item"><span>Current Biology</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although conscious perception is smooth and continuous, the input to the visual system is a serie...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although conscious perception is smooth and continuous, the input to the visual system is a series of short, discrete fixations interleaved with rapid shifts of the eye. 
One possible explanation for visual stability is that internal maps of objects and their visual properties are remapped around the time of saccades, but numerous studies have demonstrated that visual patterns are not combined across saccades. Here, we report that visual-form aftereffects transfer across separate fixations when adaptor and test are presented in the same spatial position. The magnitude of the transsaccadic adaptation increased with stimulus complexity, suggesting a progressive construction of spatiotopic receptive fields along the visual-form pathway. These results demonstrate that basic shape information is combined across saccades, allowing for predictive and consistent information from the past to be incorporated into each new fixation.
{"id":29058738,"title":"Spatiotopic Transfer of Visual-Form Adaptation across Saccadic Eye Movements","internal_url":"https://www.academia.edu/29058738/Spatiotopic_Transfer_of_Visual_Form_Adaptation_across_Saccadic_Eye_Movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512563,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512563/thumbnails/1.jpg","file_name":"melcher2005.pdf","download_url":"https://www.academia.edu/attachments/49512563/download_file","bulk_download_file_name":"Spatiotopic_Transfer_of_Visual_Form_Adap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512563/melcher2005-libre.pdf?1476136402=\u0026response-content-disposition=attachment%3B+filename%3DSpatiotopic_Transfer_of_Visual_Form_Adap.pdf\u0026Expires=1740881335\u0026Signature=Dt3wMBmNoCwrzf21RABQwLdnFtPD74GBqjhZr2ZKOtGsfDXD-1Qz9YyoSozyR1CwdQGQVzFRirW4TpsPTwa2WrJOpTmMIU9-h4UY~Oo3Sq0h28~xILBRJj2cIxKvHdun1nRPBBVj8tX3O6ppSroktohzcDwixOItu3mZFlcig6~jbijJ812unHFR5KJdOPDVXZ~YU9IH1Gw0eY7fZ7tq0F9MZd4J3pfs0i8gIDq1IFytObxt61pJYA1fsHES5n74VxlhDRMsA2zycd5pWdsIxY8booBFwCn8O1MrDBWO~nq8t5MupRU7kBJOW2PFzYqrE3vJntIH4S-FDNacEJlGUA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058757"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058757/Temporal_Windows_in_Visual_Processing_Prestimulus_Brain_State_and_Poststimulus_Phase_Reset_Segregate_Visual_Transients_on_Different_Temporal_Scales"><img alt="Research paper thumbnail of Temporal Windows in Visual Processing: &quot;Prestimulus Brain State&quot; and &quot;Poststimulus Phase Reset&quot; Segregate Visual Transients on Different Temporal Scales" class="work-thumbnail" src="https://attachments.academia-assets.com/49512568/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058757/Temporal_Windows_in_Visual_Processing_Prestimulus_Brain_State_and_Poststimulus_Phase_Reset_Segregate_Visual_Transients_on_Different_Temporal_Scales">Temporal Windows in Visual Processing: &quot;Prestimulus Brain State&quot; and &quot;Poststimulus Phase Reset&quot; Segregate Visual Transients on Different Temporal Scales</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/ChristophBraun">Christoph Braun</a></span></div><div class="wp-workCard_item"><span>Journal of Neuroscience</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Dynamic vision requires both stability of the current perceptual representation and sensitivity 
Dynamic vision requires both stability of the current perceptual representation and sensitivity to the accumulation of sensory evidence over time. Here we study the electrophysiological signatures of this intricate balance between temporal segregation and integration in vision. Within a forward masking paradigm with short and long stimulus onset asynchronies (SOA), we manipulated the temporal overlap of the visual persistence of two successive transients. Human observers enumerated the items presented in the second target display as a measure of the informational capacity read out from this partly temporally integrated visual percept. We observed higher β-power immediately before mask display onset in incorrect trials, in which enumeration failed due to stronger integration of mask and target visual information. This effect was timescale-specific, distinguishing between segregation and integration of visual transients that were distant in time (long SOA). Conversely, for short SOA trials, mask onset evoked a stronger visual response when mask and targets were correctly segregated in time. Examination of the target-related response profile revealed the importance of an evoked α-phase reset for the segregation of those rapid visual transients. Investigating this precise mapping of the temporal relationships of visual signals onto electrophysiological responses highlights how the stream of visual information is carved up into discrete temporal windows that mediate between segregated and integrated percepts.
Fragmenting the stream of visual information provides a means to stabilize perceptual events within one instant in time.
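
The pre-stimulus β-power comparison described in this abstract is, in analysis terms, a band-power contrast just before mask onset. The following single-channel sketch shows the general shape of such a measure, assuming a 13-30 Hz band, a 1 kHz sampling rate, and a Butterworth filter with a Hilbert envelope; this is an illustrative pipeline, not the authors' analysis code.

import numpy as np
from scipy.signal import butter, filtfilt, hilbert

fs = 1000.0                       # sampling rate in Hz (assumed)
t = np.arange(-0.5, 0.5, 1 / fs)  # time relative to mask onset
rng = np.random.default_rng(1)
trace = rng.normal(0, 1, t.size)  # stand-in for a real MEG/EEG channel

# Band-pass the trace in an assumed beta band (13-30 Hz).
b, a = butter(4, [13 / (fs / 2), 30 / (fs / 2)], btype="band")
beta = filtfilt(b, a, trace)

# Instantaneous beta power via the Hilbert envelope.
power = np.abs(hilbert(beta)) ** 2

# Average power in an assumed 200 ms pre-mask window.
prestim = power[(t >= -0.2) & (t < 0.0)]
print(f"mean pre-mask beta power: {prestim.mean():.3f}")

In the study's logic, this quantity would be compared between correct and incorrect enumeration trials at the long SOA.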
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512568,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512568/thumbnails/1.jpg","file_name":"1554.full.pdf","download_url":"https://www.academia.edu/attachments/49512568/download_file","bulk_download_file_name":"Temporal_Windows_in_Visual_Processing_Pr.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512568/1554.full-libre.pdf?1476136401=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_Windows_in_Visual_Processing_Pr.pdf\u0026Expires=1740065947\u0026Signature=TfVREfODw6EhkP1lj49ao2nN5T9zbNgRgZveLraUYj8BItiujddCv27APEYSPrCIWFf0e7XPPLyaaIYGsERdqNSU-mwDCCH0TWEYhSM-OhSNnXqK0DTRrC-hIhtjsNEfjpmF5kFlPdhq6hZHuxzYx7WBMXaoXTqExeq41pl9PymeX~WSfdCmHyfeBDnA4gKSFYEjPseBBqz6iVMu~mRKPUQe809jwUMd-99f7CLqWdYXfjY3aFh9MDJ-yE5XK-VZyR0fDYdeOiBrKgvTt~IkimSjYnq7bgXwy2je3A1nq5CxQiMkRot8OQS1HtmbJO6q3zocSF3n1TZmp7oed2r8VQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058759"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058759/Trans_saccadic_perception"><img alt="Research paper thumbnail of Trans-saccadic perception" class="work-thumbnail" src="https://attachments.academia-assets.com/49512571/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058759/Trans_saccadic_perception">Trans-saccadic perception</a></div><div class="wp-workCard_item"><span>Trends in Cognitive Sciences</span><span>, 2008</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6fa24278973050c5b848c7e842c257f6" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512571,&quot;asset_id&quot;:29058759,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512571/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058759"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058759"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058759; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058759]").text(description); $(".js-view-count[data-work-id=29058759]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x 
work-percentile"></span></span><script>$(function () { var workId = 29058759; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058759']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6fa24278973050c5b848c7e842c257f6" } } $('.js-work-strip[data-work-id=29058759]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058759,"title":"Trans-saccadic perception","internal_url":"https://www.academia.edu/29058759/Trans_saccadic_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512571,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512571/thumbnails/1.jpg","file_name":"David_Melcher_1_and_Carol_L._Colby20161010-18905-bdjtky.pdf","download_url":"https://www.academia.edu/attachments/49512571/download_file","bulk_download_file_name":"Trans_saccadic_perception.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512571/David_Melcher_1_and_Carol_L._Colby20161010-18905-bdjtky-libre.pdf?1476136396=\u0026response-content-disposition=attachment%3B+filename%3DTrans_saccadic_perception.pdf\u0026Expires=1740881335\u0026Signature=M-rLMSbXJaoz75fJjXbPEFrepwxYZU0EYZfIUquo6I8T~XqhMjFHk2PqkDdXCKTBTEypRmHlyvtZRWluXPnmVlI2OI-8xpL9PvT6ZptRcK8gib920JSQbb10VVRjsj~z8Sj7d1Vv3HulSJtmV57YXnpjw6nu-v3n0SX6GF0IBLaUPsLUmtVlxVBdXYe8S8Fw1UqdKp8AmUwfFoMHf1LDMgzLH~PW9xb0pgFZenO5C2S25teFvTaSsC-I2V-R-Ra-nGehpT~7ijqeSP~2M969ETdbJ4jWJNXq0b85AHPXwCzGSJe4~ZP~OmURzBQY7TvSvCSPtDAJFs2UDR8bAoLEXw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058767"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058767/Persistence_of_visual_memory_for_scenes"><img alt="Research paper thumbnail of Persistence of visual memory for scenes" class="work-thumbnail" src="https://attachments.academia-assets.com/49512587/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" 
href="https://www.academia.edu/29058767/Persistence_of_visual_memory_for_scenes">Persistence of visual memory for scenes</a></div><div class="wp-workCard_item"><span>Nature</span><span>, 2001</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="9224c675b9816e17f77764d86933d1d8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512587,&quot;asset_id&quot;:29058767,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512587/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058767"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058767"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058767; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058767]").text(description); $(".js-view-count[data-work-id=29058767]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058767; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058767']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "9224c675b9816e17f77764d86933d1d8" } } $('.js-work-strip[data-work-id=29058767]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058767,"title":"Persistence of visual memory for scenes","internal_url":"https://www.academia.edu/29058767/Persistence_of_visual_memory_for_scenes","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512587,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512587/thumbnails/1.jpg","file_name":"Melcher_D._Persistence_of_visual_memory_20161010-2393-xyv975.pdf","download_url":"https://www.academia.edu/attachments/49512587/download_file","bulk_download_file_name":"Persistence_of_visual_memory_for_scenes.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512587/Melcher_D._Persistence_of_visual_memory_20161010-2393-xyv975-libre.pdf?1476136376=\u0026response-content-disposition=attachment%3B+filename%3DPersistence_of_visual_memory_for_scenes.pdf\u0026Expires=1740881335\u0026Signature=cofW44RKPwWlD4AvlIfrOhk5TOls4kNoMcXmPJ4uKKFHb1oiQwF0Ix4FnKRqqoivoqYYm7hmfDGmR0v3KLJUmC9~zIUi3DavcQBuTLdidj8TMWxKZ0gRAgLyaVQjpJ9DzThNJelysLAldoLEoRpUe05qTCCcwAgL~AqtlWtBWm2hTCBzqVO-1SxH32brGdRouoGZ3u~K2etpA7ub3krscwB5L7Qk6~FoLwJWmYxiPvf6aLhfxqlGSoXXQuxCyPMftZewiakr2ByfDlJVyWlHVEy0nhSZh~KswFSEZz6cRF2dnY~qu4Ulktp-JXcIjt20Lebnew2aNFTFwZmkWpwGFA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="profile--tab_heading_container js-section-heading" data-section="Recent publications" id="Recent publications"><h3 class="profile--tab_heading_container">Recent publications by David Melcher</h3></div><div class="profile--tab_heading_container js-section-heading" data-section="Temporal aspects of perception" id="Temporal aspects of perception"><h3 class="profile--tab_heading_container">Temporal aspects of perception by David Melcher</h3></div><div class="js-work-strip profile--work_container" data-work-id="5309667"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309667/The_role_of_attention_in_central_and_peripheral_motion_integration"><img alt="Research paper thumbnail of The role of attention in central and peripheral motion integration" class="work-thumbnail" src="https://attachments.academia-assets.com/49352401/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309667/The_role_of_attention_in_central_and_peripheral_motion_integration">The role of attention in central and peripheral motion integration</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://york.academia.edu/AurelioBruno">Aurelio Bruno</a></span></div><div class="wp-workCard_item"><span>Vision Research</span><span>, 2004</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Attention has been shown to modulate visual processing in a wide variety of tasks. 
Attention has been shown to modulate visual processing in a wide variety of tasks. We tested the influence of attention on the temporal integration of motion for both central and peripherally viewed targets (6° × 6°). Consistent with previous results, motion sensitivity for a brief motion signal (70-3500 ms) embedded in noise (10 s) increased as a function of motion duration up to a critical duration of about 1.5 s. Summation times for centrally and peripherally viewed targets were similar. An effect of eccentricity was found, however, in a double-motion task, in which two brief (150 ms) motion signals were presented with varying delays (0-7 s) of random noise between the two signals. Specifically, the maximum delay between the two signals that still supported temporal summation (summation constant) was about three times longer for centrally viewed targets (3.5-4.5 s versus 1.5-2 s). We investigated the role of spatial attention in the double-motion task by adding a concurrent color contrast discrimination task. The addition of the concurrent task dramatically reduced differences in the summation constant for central and peripheral targets, without reducing overall motion sensitivity. Thus, attention appears to specifically modulate temporal summation, suggesting that the long integration times found for motion coherence are mediated by attention.
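
The summation pattern reported here, sensitivity growing with signal duration up to a critical duration and saturating beyond it, can be written as a simple piecewise model. The sketch below uses an assumed linear form and illustrative parameters, not fitted values from the paper.

def sensitivity(duration_s, critical_s=1.5, gain=1.0):
    """Toy temporal-summation model: linear growth up to the
    critical duration, flat (saturated) beyond it. Both the
    functional form and the parameters are assumptions."""
    return gain * min(duration_s, critical_s)

# Durations spanning the range used in the study (70-3500 ms).
for d in [0.07, 0.35, 0.70, 1.50, 3.50]:
    print(f"{d:4.2f} s -> relative sensitivity {sensitivity(d):.2f}")

Printing these values shows sensitivity rising with duration and then flattening at 1.5 s, the critical-duration signature described above.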
href="https://www.academia.edu/29058734/Temporal_buffering_and_visual_capacity_The_time_course_of_object_formation_underlies_capacity_limits_in_visual_cognition">Temporal buffering and visual capacity: The time course of object formation underlies capacity limits in visual cognition</a></div><div class="wp-workCard_item"><span>Attention, Perception, &amp; Psychophysics</span><span>, 2013</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Capacity limits are a hallmark of visual cognition. The upper boundary of our ability to individu...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Capacity limits are a hallmark of visual cognition. The upper boundary of our ability to individuate and remember objects is well known but-despite its central role in visual information processing-not well understood. Here, we investigated the role of temporal limits in the perceptual processes of forming &quot;object files.&quot; Specifically, we examined the two fundamental mechanisms of object file formation-individuation and identification-by selectively interfering with visual processing by using forward and backward masking with variable stimulus onset asynchronies. While target detection was almost unaffected by these two types of masking, they showed distinct effects on the two different stages of object formation. Forward &quot;integration&quot; masking selectively impaired object individuation, whereas backward &quot;interruption&quot; masking only affected identification and the consolidation of information into visual working memory. 
We therefore conclude that the inherent temporal dynamics of visual information processing are an essential component in creating the capacity limits in object individuation and visual working memory.
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512559,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512559/thumbnails/1.jpg","file_name":"Temporal_buffering_and_visual_capacity_T20161010-27087-j1rev4.pdf","download_url":"https://www.academia.edu/attachments/49512559/download_file","bulk_download_file_name":"Temporal_buffering_and_visual_capacity_T.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512559/Temporal_buffering_and_visual_capacity_T20161010-27087-j1rev4-libre.pdf?1476136411=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_buffering_and_visual_capacity_T.pdf\u0026Expires=1740881335\u0026Signature=aI2zIxhWIlvR~g4A-KCJgxXfj1vF4yoxESs-v6EH1vGmnAWkZb6nCWOW6-IFpnY4i4JzBWp-BQX~9aJUU1vl1fbGt3tqu6ohmjMPDp4z6H8o-awp-MwGQXUHUa8wpH9pBOTfwEK6TwfcgQOoYyYcG3aQu5cByHAtMWX5dLr~-JdwWrWw~ttTgIkIA21zcD-quaaDnG7-r2O8HpRL56hNmNZwoj5vAVyrlsrhQoapXf2PFOQnWqXsROqChXWOdJJhf7NBmMJBNCRhKfc1bp5SKWiHuSdItRkvo~BTfUhxZPuQNbRa~94EvxzandKlPp3FsBe23WKGucZrgQxm-Aw9iA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058748"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058748/Electrophysiological_signatures_of_temporal_segregation_and_integration_of_visual_information_an_MEG_study"><img alt="Research paper thumbnail of Electrophysiological signatures of temporal segregation and integration of visual information - an MEG study" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058748/Electrophysiological_signatures_of_temporal_segregation_and_integration_of_visual_information_an_MEG_study">Electrophysiological signatures of temporal segregation and integration of visual information - an MEG study</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2013</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058748"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058748"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058748; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058748]").text(description); $(".js-view-count[data-work-id=29058748]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x 
work-percentile"></span></span><script>$(function () { var workId = 29058748; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058748']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058748]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058748,"title":"Electrophysiological signatures of temporal segregation and integration of visual information - an MEG study","internal_url":"https://www.academia.edu/29058748/Electrophysiological_signatures_of_temporal_segregation_and_integration_of_visual_information_an_MEG_study","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058762"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058762/Rapid_enumeration_within_a_fraction_of_a_single_glance_The_role_of_visible_persistence_in_object_individuation_capacity"><img alt="Research paper thumbnail of Rapid enumeration within a fraction of a single glance: The role of visible persistence in object individuation capacity" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058762/Rapid_enumeration_within_a_fraction_of_a_single_glance_The_role_of_visible_persistence_in_object_individuation_capacity">Rapid enumeration within a fraction of a single glance: The role of visible persistence in object individuation capacity</a></div><div class="wp-workCard_item"><span>Visual Cognition</span><span>, 2012</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The number of items that can be individuated at a single glance is limited. 
The number of items that can be individuated at a single glance is limited. Here, we investigate object individuation at a higher temporal resolution, in fractions of a single glance. In two experiments involving object individuation we manipulated the duration of visual persistence of the target items with a forward masking procedure. The number of items as well as their ...
capacity","internal_url":"https://www.academia.edu/29058762/Rapid_enumeration_within_a_fraction_of_a_single_glance_The_role_of_visible_persistence_in_object_individuation_capacity","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058763"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058763/Temporal_Integration_Windows_for_Naturalistic_Visual_Sequences"><img alt="Research paper thumbnail of Temporal Integration Windows for Naturalistic Visual Sequences" class="work-thumbnail" src="https://attachments.academia-assets.com/49512576/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058763/Temporal_Integration_Windows_for_Naturalistic_Visual_Sequences">Temporal Integration Windows for Naturalistic Visual Sequences</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://iwww-mpg.academia.edu/angelaalbi">angela albi</a></span></div><div class="wp-workCard_item"><span>PLoS ONE</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">There is increasing evidence that the brain possesses mechanisms to integrate incoming sensory in...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">There is increasing evidence that the brain possesses mechanisms to integrate incoming sensory information as it unfolds over time-periods of 2-3 seconds. The ubiquity of this mechanism across modalities, tasks, perception and production has led to the proposal that it may underlie our experience of the subjective present. A critical test of this claim is that this phenomenon should be apparent in naturalistic visual experiences. We tested this using movie-clips as a surrogate for our day-to-day experience, temporally scrambling them to require (re-) integration within and beyond the hypothesized 2-3 second interval. Two independent experiments demonstrate a step-wise increase in the difficulty to follow stimuli at the hypothesized 2-3 second scrambling condition. Moreover, only this difference could not be accounted for by low-level visual properties. 
This provides the first evidence that this 2-3 second integration window extends to complex, naturalistic visual sequences more consistent with our experience of the subjective present.
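
The temporal-scrambling manipulation itself is straightforward to express: cut a clip into fixed-length segments and shuffle the segments, with segment duration as the experimental variable. The sketch below assumes a plain list of frames and a 24 fps clip; both are illustrative assumptions, not the authors' stimulus code.

import numpy as np

def scramble(frames, segment_frames, rng):
    """Shuffle a frame sequence at the granularity of
    segment_frames-long chunks, preserving order within chunks."""
    segments = [frames[i:i + segment_frames]
                for i in range(0, len(frames), segment_frames)]
    rng.shuffle(segments)
    return [f for seg in segments for f in seg]

rng = np.random.default_rng(2)
frames = list(range(240))          # stand-in for 10 s of video at 24 fps
fine = scramble(frames, 24, rng)   # 1 s segments: within the window
coarse = scramble(frames, 96, rng) # 4 s segments: beyond the window
print(fine[:30])

Comparing comprehension across segment durations on either side of the 2-3 second boundary is what yields the step-wise effect reported above.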
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512576,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512576/thumbnails/1.jpg","file_name":"54e1fd2e0cf296663793cbbd.pdf","download_url":"https://www.academia.edu/attachments/49512576/download_file","bulk_download_file_name":"Temporal_Integration_Windows_for_Natural.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512576/54e1fd2e0cf296663793cbbd-libre.pdf?1476136390=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_Integration_Windows_for_Natural.pdf\u0026Expires=1740881335\u0026Signature=HNb9nCcEp1tX79AaxviPMTK9sUyfSw0QrsRNi~0LpXn4Corq3aifmqXtVHyF9ORHJz~wLLTar9W4bTbFtO3CnoSWiKoEcOQ6fdW5kRZBW2g8XZ4jfcZgDNfzuLnI6WxHPY12lYh4mGegzo3uhwrFwqksuPXcD-EEYQOX6l0bMxCSTlnE0SM1~bfppTHyBldRnHbYAN0RKS9KQ~y3EvkOFHbBUCaqdhT-I53Hu4JhEoJN1LZvFUv~z2jbbIojdFptITQ~dWyOsM01qQKNACSY1AV-ijxePuT1JN6z7iopFqYKvHW~maxYwRLkStP03GLhnjwzzFSbDOrqZEEiJe239w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058774"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058774/Dissociation_between_spatial_and_temporal_integration_mechanisms_in_Vernier_fusion"><img alt="Research paper thumbnail of Dissociation between spatial and temporal integration mechanisms in Vernier fusion" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058774/Dissociation_between_spatial_and_temporal_integration_mechanisms_in_Vernier_fusion">Dissociation between spatial and temporal integration mechanisms in Vernier fusion</a></div><div class="wp-workCard_item"><span>Vision research</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The visual system constructs a percept of the world across multiple spatial and temporal scales. ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The visual system constructs a percept of the world across multiple spatial and temporal scales. This raises the questions of whether different scales involve separate integration mechanisms and whether spatial and temporal factors are linked via spatio-temporal reference frames. We investigated this using Vernier fusion, a phenomenon in which the features of two Vernier stimuli presented in close spatio-temporal proximity are fused into a single percept. With increasing spatial offset, perception changes dramatically from a single percept into apparent motion and later, at larger offsets, into two separately perceived stimuli. We tested the link between spatial and temporal integration by presenting two successive Vernier stimuli presented at varying spatial and temporal offsets. 
The second Vernier either had the same or the opposite offset as the first. We found that the type of percept depended not only on spatial offset, as reported previously, but interacted with the temporal p...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058774"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058774"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058774; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058774]").text(description); $(".js-view-count[data-work-id=29058774]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058774; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058774']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058774]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058774,"title":"Dissociation between spatial and temporal integration mechanisms in Vernier fusion","internal_url":"https://www.academia.edu/29058774/Dissociation_between_spatial_and_temporal_integration_mechanisms_in_Vernier_fusion","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058780"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" 
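As a concrete illustration of this two-Vernier design, here is a minimal Python sketch that enumerates the condition grid implied by the abstract (spatial offset × temporal offset × same/opposite second offset). The specific offset and SOA values are hypothetical placeholders, not the values used in the study.

```python
# Hypothetical condition grid for the two-Vernier paradigm described above:
# a first Vernier at some spatial offset, then a second Vernier whose offset
# sign is either the same or the opposite, at a varying temporal offset.
from itertools import product

spatial_offsets_arcmin = [0.5, 1, 2, 4, 8]   # hypothetical spatial offsets
soas_ms = [0, 20, 40, 80, 160]               # hypothetical temporal offsets
second_offset = ["same", "opposite"]

trials = [
    {"offset_arcmin": off, "soa_ms": soa, "second": rel}
    for off, soa, rel in product(spatial_offsets_arcmin, soas_ms, second_offset)
]
print(len(trials), "conditions")             # 5 x 5 x 2 = 50
```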
href="https://www.academia.edu/29058780/Expansion_and_Compression_of_Time_Correlate_with_Information_Processing_in_an_Enumeration_Task"><img alt="Research paper thumbnail of Expansion and Compression of Time Correlate with Information Processing in an Enumeration Task" class="work-thumbnail" src="https://attachments.academia-assets.com/49512575/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058780/Expansion_and_Compression_of_Time_Correlate_with_Information_Processing_in_an_Enumeration_Task">Expansion and Compression of Time Correlate with Information Processing in an Enumeration Task</a></div><div class="wp-workCard_item"><span>PLOS ONE</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Perception of temporal duration is subjective and is influenced by factors such as attention and ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Perception of temporal duration is subjective and is influenced by factors such as attention and context. For example, unexpected or emotional events are often experienced as if time subjectively expands, suggesting that the amount of information processed in a unit of time can be increased. Time dilation effects have been measured with an oddball paradigm in which an infrequent stimulus is perceived to last longer than standard stimuli in the rest of the sequence. Likewise, time compression for the oddball occurs when the duration of the standard items is relatively brief. Here, we investigated whether the amount of information processing changes when time is perceived as distorted. On each trial, an oddball stimulus of varying numerosity (1-14 items) and duration was presented along with standard items that were either short (70 ms) or long (1050 ms). Observers were instructed to count the number of dots within the oddball stimulus and to judge its relative duration with respect to the standards on that trial. Consistent with previous results, oddballs were reliably perceived as temporally distorted: expanded for longer standard stimuli blocks and compressed for shorter standards. The occurrence of these distortions of time perception correlated with perceptual processing; i.e. enumeration accuracy increased when time was perceived as expanded and decreased with temporal compression. These results suggest that subjective time distortions are not epiphenomenal, but reflect real changes in sensory processing. 
Such short-term plasticity in information processing rate could be evolutionarily advantageous in optimizing perception and action during critical moments.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="1d50639fb5ee1e74d6a59f44e15d718b" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512575,&quot;asset_id&quot;:29058780,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512575/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058780"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058780"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058780; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058780]").text(description); $(".js-view-count[data-work-id=29058780]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058780; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058780']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "1d50639fb5ee1e74d6a59f44e15d718b" } } $('.js-work-strip[data-work-id=29058780]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058780,"title":"Expansion and Compression of Time Correlate with Information Processing in an Enumeration Task","internal_url":"https://www.academia.edu/29058780/Expansion_and_Compression_of_Time_Correlate_with_Information_Processing_in_an_Enumeration_Task","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
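To make the paradigm concrete, here is a small Python sketch of a single trial. Only the 70 ms / 1050 ms standards and the 1-14 dot numerosity range come from the abstract; the sequence length, oddball position, and the grid of oddball durations are hypothetical placeholders.

```python
# One trial of the oddball paradigm sketched above: a sequence of standards
# (all short or all long) containing one oddball whose numerosity and
# duration vary; observers count its dots and judge its relative duration.
import random

def make_trial(standard="short"):
    standard_ms = 70 if standard == "short" else 1050
    oddball = {
        "numerosity": random.randint(1, 14),   # 1-14 dots, as in the abstract
        # hypothetical grid of oddball durations around the standard
        "duration_ms": standard_ms * random.choice([0.7, 0.85, 1.0, 1.15, 1.3]),
    }
    n_standards = random.randint(5, 8)         # hypothetical sequence length
    position = random.randrange(1, n_standards)
    return {"standard_ms": standard_ms, "n_standards": n_standards,
            "oddball": oddball, "oddball_position": position}

print(make_trial("long"))
```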
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512575,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512575/thumbnails/1.jpg","file_name":"55f0034708ae199d47c03b4c.pdf","download_url":"https://www.academia.edu/attachments/49512575/download_file","bulk_download_file_name":"Expansion_and_Compression_of_Time_Correl.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512575/55f0034708ae199d47c03b4c-libre.pdf?1476136394=\u0026response-content-disposition=attachment%3B+filename%3DExpansion_and_Compression_of_Time_Correl.pdf\u0026Expires=1740881335\u0026Signature=Wy9Ill72Cf7VUTgaAqX4rP2c1fIzihRLM6~G0HhqrHlgFnB-adUaOrra4zulQMpqao99oLGVcTh1E7Aa8mdWr-Ar15tySl9hp08xHvkYA-KmPvQBldHwnfF2wUFIG-GWn6rUAiuW1dvYQNDalD-MogQaI~jxUN7rwnJ8uSVhlv9pxPb78ewuXuWA~Z~ScOK9JKH95p2vxY0jHfE8N8qAUpB7EUQwxD2aBebbdxXtSBv0VwR5ITAN67M7JySGlFWXmfdUhWQj7IKFKlo3B5Mg3XzGZ6aHQgrxQ6gm-eCxdYg8tVpIcdAtLnf~eyjM83vH041STaXFoB0upcNrGaVLlQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="8625239" id="researchhighlights"><div class="js-work-strip profile--work_container" data-work-id="37613826"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/37613826/Frequency_modulation_of_neural_oscillations_according_to_visual_task_demands"><img alt="Research paper thumbnail of Frequency modulation of neural oscillations according to visual task demands" class="work-thumbnail" src="https://attachments.academia-assets.com/57596697/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/37613826/Frequency_modulation_of_neural_oscillations_according_to_visual_task_demands">Frequency modulation of neural oscillations according to visual task demands</a></div><div class="wp-workCard_item"><span>PNAS</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Temporal integration in visual perception is thought to occur within cycles of occipital alpha-ba...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Temporal integration in visual perception is thought to occur within cycles of occipital alpha-band (8–12 Hz) oscillations. Successive stimuli may be integrated when they fall within the same alpha cycle and segregated for different alpha cycles. Consequently , the speed of alpha oscillations correlates with the temporal resolution of perception, such that lower alpha frequencies provide longer time windows for perceptual integration and higher alpha frequencies correspond to faster sampling and segregation. Can the brain&#39;s rhythmic activity be dynamically controlled to adjust its processing speed according to different visual task demands? 
We recorded magnetoencephalography (MEG) while participants switched between task instructions for temporal integration and segregation, holding stimuli and task difficulty constant. We found that the peak frequency of alpha oscillations decreased when visual task demands required temporal integration compared with segregation. Alpha frequency was strategically modulated immediately before and during stimulus processing, suggesting a preparatory top-down source of modulation. Its neural generators were located in occipital and inferotem-poral cortex. The frequency modulation was specific to alpha oscillations and did not occur in the delta (1–3 Hz), theta (3–7 Hz), beta (15–30 Hz), or gamma (30–50 Hz) frequency range. These results show that alpha frequency is under top-down control to increase or decrease the temporal resolution of visual perception. visual perception | temporal integration | alpha oscillations | oscillation frequency | top-down control V isual perception is tasked both with constructing stable representations over time as well as maximizing sensitivity to transient changes. A large body of work has shown that neural oscillations in the alpha band (8–12 Hz) are partially responsible for determining the temporal resolution of perception, such that when discrete events occur within the same oscillatory cycle they can become perceptually integrated (1–8). For instance, individuals with higher peak alpha frequencies have perception with higher temporal resolution (2, 9), indicating that lower peak frequencies correspond to integration over longer time windows. Furthermore, trial-to-trial variability in spontaneous alpha frequency predicts accuracy in a temporal discrimination paradigm (9) and the rate of illusory flicker (10). These findings raise the question of whether variability in peak alpha frequency is stochastic, varying randomly across persons and fluctuating over time within the same person, or might instead be strategically modulated based on task demands. It is known that the temporal resolution of visual perception can be modulated by attention (11–13), and oscillatory phase and power can be controlled by top-down factors (14–18). Here, we investigated whether the peak frequency of alpha oscillations is subject to attentional control, increasing or decreasing so as to effectively lengthen or shorten the temporal window of perceptual integration. To examine this question, we recorded magnetoencephalography (MEG) data while observers performed two different tasks, one requiring visual integration over time and the other involving visual segregation over time. Both tasks are variants of a classical perceptual integration paradigm called the missing-element task (MET) (19). In the MET, an array of elements is presented in two successive frames, separated by a short interstimulus interval (ISI). When superimposed, the elements in both frames occupy all but one of the positions in the array and the observer&#39;s job is to identify the position with the missing element (Fig. 1A, blue). As ISI decreases, the two frames become perceptually integrated and identifying the missing element is simple. We introduced another condition that we term the odd-element task (OET) (20). In the OET, one-half of one of the elements is presented in the first frame and the other half is presented in the second (Fig. 1A, red). 
Here, the observer&#39;s task is to decide which location contained this odd element, which becomes easier as ISI increases and the frames are perceptually segregated. Critically, the use of these two tasks allowed us to determine whether modulations of peak alpha frequency were specifically tied to temporal resolution, as opposed to a more general fluctuation in attention or in visual sensitivity or criterion (21, 22). We hypothesized that if frequency modulation of alpha supports top-down control over temporal integration , then we should observe higher frequencies during the OET (when segregation is beneficial) compared with the MET (when integration is beneficial). Results By mapping psychometric functions relating performance on both tasks to ISI (sigmoid fit R 2 = 0.94 for OET, R 2 = 0.93 for MET), we could identify intersection points for each observer Significance Neural oscillations are hypothesized to play an important role in modulating perceptual processing in accordance with top-down goals. For instance, the amplitude, phase, and spatial distribution of alpha-band oscillations change with attention. Given recent links between the peak frequency of alpha oscillations and the temporal resolution of perception, we investigated whether frequency modulation occurs when task demands emphasize integration or segregation of visual input over time. We found that alpha frequency in occipital–temporal cortex decreased during, and in anticipation of, stimulus processing when task demands required temporal integration compared with segregation. These results demonstrate a unique top-down mechanism by which the brain controls the temporal resolution of visual processing in accordance with current task demands.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="484136cdd9070f21c4e2af1849d9dcec" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:57596697,&quot;asset_id&quot;:37613826,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/57596697/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="37613826"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="37613826"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 37613826; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=37613826]").text(description); $(".js-view-count[data-work-id=37613826]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 37613826; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='37613826']"); 
Temporal Integration Windows in Neural Processing and Perception Aligned to Saccadic Eye Movements
by David Melcher and Evelyn M
Current Biology, 2016
Movements","internal_url":"https://www.academia.edu/37613827/Temporal_Integration_Windows_in_Neural_Processing_and_Perception_Aligned_to_Saccadic_Eye_Movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57596698,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57596698/thumbnails/1.jpg","file_name":"Wutz_Melcher_CurrBiol_2016.pdf","download_url":"https://www.academia.edu/attachments/57596698/download_file","bulk_download_file_name":"Temporal_Integration_Windows_in_Neural_P.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57596698/Wutz_Melcher_CurrBiol_2016-libre.pdf?1539987459=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_Integration_Windows_in_Neural_P.pdf\u0026Expires=1740881335\u0026Signature=BlQbaDYTFqIJv6fdNLQy4jROhrVt6Ko01W2EcCFZH4hbaQ5I2G7k3YCdbaH3uCZl1LHcPt1uDMHYR0zqE1yOV9xzDMEJzh3HiuPu3SOk6uWlQQHEeVCv2DvDrPpleYNW4eP6SwOeWGXrdiLO4fvg74D6RXkL7xFtKNLbpyu7q~SleyiqlOKkKcOam3rbdOoyqdga8W~0Gp309UdEZrArJW6y7zX5N5yRV2Qoh04wx~MKKKUwXdkq4bIy3~uQg9ewkb~uJiKA535zlS~vrJTONJbUe~h8ARCFHCWUx7GKabhWemmb8wSitRVg20Vs9ZDWtCIB5UQ3~x30~cT0npJT6A__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="37514760"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/37514760/Alpha_band_sensory_entrainment_alters_the_duration_of_temporal_windows_in_visual_perception"><img alt="Research paper thumbnail of Alpha-band sensory entrainment alters the duration of temporal windows in visual perception" class="work-thumbnail" src="https://attachments.academia-assets.com/57487443/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/37514760/Alpha_band_sensory_entrainment_alters_the_duration_of_temporal_windows_in_visual_perception">Alpha-band sensory entrainment alters the duration of temporal windows in visual perception</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/LucaRonconi1">Luca Ronconi</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The phase and frequency of neural oscillations in the alpha band (8–12 Hz) have been recently pro...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The phase and frequency of neural oscillations in the alpha band (8–12 Hz) have been recently proposed as key parameters 
for the temporal resolution of visual perception. Here, we tested the possible causal links between these oscillatory features and temporal integration/segregation. The individual alpha frequency (IAF) peak as obtained from resting-state electroencephalography was used to set the frequency of sensory (audiovisual) entrainment for the lower (IAF − 2 Hz) and upper (IAF + 2 Hz) alpha. Entrainment at IAF ± 2 Hz was administered in the prestimulus interval to align oscillations to a faster or slower rhythm. We densely sampled in time the accuracy for integration/segregation by using identical stimuli with different instructions. The spectral peaks of performance fluctuations over time were found in the upper or lower alpha band for the IAF + 2 and IAF − 2 Hz entrainment, respectively, implying that faster entrainment resulted in faster behavioral fluctuations. Moreover, the entrainment frequency had opposite effects on temporal resolution: faster entrainment improved segregation while slower entrainment improved integration. Performance fluctuations were almost in anti-phase between the two tasks, such that highest integration performance coincided with lowest segregation performance. These findings provide evidence for a direct link between changes in the alpha band and the temporal resolution of perception. Despite our subjective impression of a continuous and smooth reality, the continuous flow of information coming from the sensory world is not elaborated in an analog fashion. On the contrary, our brain samples information periodically by discretizing sensory inputs according to its hardwired rhythms within and across the different sensory modalities 1,2. On the one hand, the capacity to combine information over time can be advantageous for accurate and precise percepts and actions of objects that tend to remain stable over time. However, temporal integration might reduce sensitivity to rapid changes in incoming sensory input due to a dynamic environment or our own actions. For example, temporal integration of motion signals might lead to misinterpreting a 180 degree change in direction as a reduction in speed. Likewise, long temporal integration windows would reduce the effectiveness of tactile feedback during active touch. Thus, sensory processing relies on a balance between temporal integration (to improve our perceptual interpretations at low temporal resolution) and segregation (sensitivity to change with high temporal resolution). The brain may balance between these two needs through alternating between elaboration of input over time (integration) and sensitivity to new input (segregation), in a rhythmic way. The idea that perceptual processing depends on the rhythmic sampling of sensory information was initially introduce in seminal neurophysiological studies 3–5 and later confirmed using electroencephalographical (EEG) recording in humans, which consistently show a relationship between oscillatory phase and sensitivity to new input 6–9. Together, these findings argue that fluctuations in detection, reflecting sensitivity to new input, are related to alpha rhythms. The idea of a rhythmic process finds support also in nonhuman primates studies showing that spikes in sensory areas are more likely to occur at a specific phase of the local field potential oscillations relative to the opposite phase 10. 
Building on this idea, other studies have investigated the role of neural oscillations in the temporal resolution of visual perception, defined as whether two items in a sequence are perceived as separate, individual events or instead combined into a single unique percept 11–19 (for reviews see 20,21). The key idea is that if two stimuli fall into the same oscillatory cycle they are temporally bound into a single percept, while two stimuli falling into separate cycles are parsed into two unique temporal events. There is increasing M/EEG evidence for a role of the Published: xx xx xxxx OPEN</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="c139be83e6eed5b8e3d5b02f9286d7d0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:57487443,&quot;asset_id&quot;:37514760,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/57487443/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="37514760"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="37514760"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 37514760; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=37514760]").text(description); $(".js-view-count[data-work-id=37514760]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 37514760; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='37514760']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "c139be83e6eed5b8e3d5b02f9286d7d0" } } $('.js-work-strip[data-work-id=37514760]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":37514760,"title":"Alpha-band sensory entrainment alters the duration of temporal windows in visual 
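The entrainment frequencies above are anchored to each observer's individual alpha frequency (IAF). The sketch below shows one common way such a peak could be estimated from resting-state EEG (Welch power spectrum, peak within 8-12 Hz) and used to set the IAF ± 2 Hz rates; the synthetic signal and the Welch parameters are assumptions, not the authors' pipeline.

```python
# Estimate the individual alpha frequency (IAF) from resting EEG and derive
# the two audiovisual entrainment rates (IAF - 2 Hz and IAF + 2 Hz).
import numpy as np
from scipy.signal import welch

fs = 500.0                                    # Hz, hypothetical sampling rate
t = np.arange(0, 120, 1 / fs)                 # 2 min of synthetic "resting" EEG
rng = np.random.default_rng(0)
eeg = np.sin(2 * np.pi * 10.3 * t) + rng.normal(0, 2.0, t.size)  # 10.3 Hz alpha + noise

freqs, psd = welch(eeg, fs=fs, nperseg=int(4 * fs))   # 0.25 Hz resolution
band = (freqs >= 8) & (freqs <= 12)
iaf = freqs[band][np.argmax(psd[band])]       # spectral peak within the alpha band

print(f"IAF = {iaf:.2f} Hz -> entrain at {iaf - 2:.2f} Hz and {iaf + 2:.2f} Hz")
```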
perception","internal_url":"https://www.academia.edu/37514760/Alpha_band_sensory_entrainment_alters_the_duration_of_temporal_windows_in_visual_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57487443,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57487443/thumbnails/1.jpg","file_name":"Ronconi_Busch_Melcher_Sci_Rep_2018.pdf","download_url":"https://www.academia.edu/attachments/57487443/download_file","bulk_download_file_name":"Alpha_band_sensory_entrainment_alters_th.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57487443/Ronconi_Busch_Melcher_Sci_Rep_2018-libre.pdf?1538482736=\u0026response-content-disposition=attachment%3B+filename%3DAlpha_band_sensory_entrainment_alters_th.pdf\u0026Expires=1738812505\u0026Signature=JtWcADVyZGmaCx20moTiZma2ThuLCmOKSF6KAmT63NsiijZvVysUl3QkZFIY09VO4yplQ6ijDWgvUO6ldGuPSt0F2oRkPho7S31WB4RMLGUdXQY96xRElyckBxg97b318AHptBDEcbyijkzaCB9MHRHz6HtFaguf8Svg0bUEY5LkTg7Mt5WgBTpMw45rO5FKG1zghge6QtibOvUaTyqmjM6pVzK5f0LjKxejv-TVv6qTGKY9OCKgtVeQQsX5VqSdl~NfdPrDh7Nt-qKHWIJefT1PQ-Y-R5-N91wmM2MApylK0lpnqQbeeX7dARTHHT12LdwvAoUNibjuLLCzUpvK7w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="37514748"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/37514748/Multiple_oscillatory_rhythms_determine_the_temporal_organization_of_perception"><img alt="Research paper thumbnail of Multiple oscillatory rhythms determine the temporal organization of perception" class="work-thumbnail" src="https://attachments.academia-assets.com/57487428/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/37514748/Multiple_oscillatory_rhythms_determine_the_temporal_organization_of_perception">Multiple oscillatory rhythms determine the temporal organization of perception</a></div><div class="wp-workCard_item"><span>PNAS</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Incoming Q:12 sensory input is condensed by our perceptual system to represent and store informat...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Incoming Q:12 sensory input is condensed by our perceptual system to represent and store information in an optimal way. In the temporal domain, this process has been described in terms of temporal windows (TWs) of integration/segregation, in which the phase of ongoing neural oscillations determines whether two stimuli are integrated into a single percept or segregated into separate events. 
However, the durations of TWs can vary substantially, raising the question of whether different TWs map onto unique oscillations or, rather, reflect a single, general fluctuation in cortical excitability (e.g., in the alpha band). We used multivariate decoding of electroencephalography (EEG) data to investigate perception of stimuli that either repeated in the same location (two-flash fusion) or moved in space (apparent motion). By manipulating the interstimulus interval (ISI), we created bistable stimuli that caused subjects to perceive either integration (fusion/apparent motion) or segregation (two unrelated flashes). Training a classifier searchlight on the whole time/frequency/channel space, we found that the perceptual outcome (integration vs. segregation) could be reliably decoded from the phase of prestimulus oscillations in right posterior-parietal channels. The highest decoding accuracy for the two-flash fusion task [interstimulus interval (ISI) = 40 ms] was evident in the phase of alpha oscillations (8-10 Hz), while the highest decoding accuracy for the apparent motion task (ISI = 120 ms) was evident in the phase of theta oscillations (6-7 Hz). These results reveal a precise relationship between specific TW durations (short or long) and specific neural oscillations. Such oscillations at different frequencies may provide a hierarchical framework for the temporal organization of perception.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="ae4785db8e6230e70dbf19d345933ab4" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:57487428,&quot;asset_id&quot;:37514748,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/57487428/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="37514748"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="37514748"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 37514748; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=37514748]").text(description); $(".js-view-count[data-work-id=37514748]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 37514748; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='37514748']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
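The pairing of ISIs with frequency bands follows directly from cycle lengths: a stimulus pair can fall within one oscillatory cycle only if its ISI is shorter than the period 1000/f ms. The check below works through that arithmetic for the two tasks, using the bands and ISIs reported above.

```python
# Cycle-length arithmetic behind the reported band/ISI pairing: an ISI fits
# within one oscillatory cycle when it is shorter than the period 1000 / f.
for label, f_lo, f_hi, isi in [("alpha, two-flash fusion", 8, 10, 40),
                               ("theta, apparent motion", 6, 7, 120)]:
    period_fast, period_slow = 1000 / f_hi, 1000 / f_lo
    fits = isi < period_fast   # conservative: fits even at the fastest frequency
    print(f"{label}: cycle {period_fast:.0f}-{period_slow:.0f} ms, "
          f"ISI {isi} ms, within one cycle: {fits}")
```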
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "ae4785db8e6230e70dbf19d345933ab4" } } $('.js-work-strip[data-work-id=37514748]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":37514748,"title":"Multiple oscillatory rhythms determine the temporal organization of perception","internal_url":"https://www.academia.edu/37514748/Multiple_oscillatory_rhythms_determine_the_temporal_organization_of_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57487428,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57487428/thumbnails/1.jpg","file_name":"Ronconi_Melcher_PNAS_2017.pdf","download_url":"https://www.academia.edu/attachments/57487428/download_file","bulk_download_file_name":"Multiple_oscillatory_rhythms_determine_t.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57487428/Ronconi_Melcher_PNAS_2017-libre.pdf?1538482730=\u0026response-content-disposition=attachment%3B+filename%3DMultiple_oscillatory_rhythms_determine_t.pdf\u0026Expires=1740881335\u0026Signature=GHRTd~GIwbFtKeIvuDpZXa7tznH101MJZig7F25CEt-vQ-Zk9VE5nVHQyv-Zh~4luFAVXn~VZKwQhZj2NrL45HBwxNVe8yxONfdaPHt-IEY4kVC03OOcaSlpJm3kHsFy2GpiSQwschK16MA4DAD-JlHWBCSVMuT24OlWLpx5YmxkzrwedUEnVXlCrkbirR6s66cTvdzWi6VM9xrP4X5zL4Cgd2ChLkHo81qX~BVBqG65ESTZkvFc7VnXdno5Ki8md1wteyUEo5w098dFbrZXxaWfO4J7q0mQ6tFJQWJhT8CZT0diSRRsyndE7xi5pmoihXlLyJDtcgk3VzifstTLyQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="5309664"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309664/Visual_scene_memory_and_the_guidance_of_saccadic_eye_movements"><img alt="Research paper thumbnail of Visual scene memory and the guidance of saccadic eye movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49352419/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309664/Visual_scene_memory_and_the_guidance_of_saccadic_eye_movements">Visual scene memory and the guidance of saccadic eye movements</a></div><div class="wp-workCard_item"><span>Vision Research</span><span>, 2001</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">An unresolved question is how much information can be remembered from visual scenes when they are...</span><a class="js-work-more-abstract" 
data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">An unresolved question is how much information can be remembered from visual scenes when they are inspected by saccadic eye movements. Subjects used saccadic eye movements to scan a computer-generated scene, and afterwards, recalled as many objects as they could. Scene memory was quite good: it improved with display duration, it persisted over time long after the display was removed, and it continued to accumulate with additional viewings of the same display (Melcher, D. The persistance of memory for scenes. Nature 412, 401). The occurrence of saccadic eye movements was important to ensure good recall performance, even though subjects often recalled non-fixated objects. Inter-saccadic intervals increased with display duration, showing an influence of duration on global scanning strategy. The choice of saccadic target was predicted by a Random Selection with Distance Weighting (RSDW) model, in which the target for each saccade is selected at random from all available objects, weighted according to distance from fixation, regardless of which objects had previously been fixated. The results show that the visual memory that was reflected in the recall reports was not utilized for the immediate decision about where to look in the scene. Visual memory can be excellent, but it is not always reflected in oculomotor measures, perhaps because the cost of rapid on-line memory retrieval is too great.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="58541d2819de179724f5bc9ace7f4287" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352419,&quot;asset_id&quot;:5309664,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352419/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309664"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309664"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309664; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309664]").text(description); $(".js-view-count[data-work-id=5309664]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309664; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309664']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); 
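The RSDW model lends itself to a compact simulation: the next target is drawn at random from all objects with probability decreasing with distance from the current fixation, and with no memory of past fixations. In the sketch below, the exponential distance weighting and its decay constant are assumptions; the abstract specifies only that selection is random, distance-weighted, and memoryless.

```python
# Minimal simulation of the Random Selection with Distance Weighting (RSDW)
# model described above: each saccade target is chosen at random from all
# objects, weighted by distance from fixation, ignoring fixation history.
import numpy as np

def rsdw_next_target(objects_xy, fixation_xy, rng):
    d = np.linalg.norm(objects_xy - fixation_xy, axis=1)
    w = np.exp(-d / 10.0)      # hypothetical decay: nearer objects more likely
    w[d < 1e-9] = 0.0          # don't re-target the currently fixated object
    return rng.choice(len(objects_xy), p=w / w.sum())

rng = np.random.default_rng(1)
objects = rng.uniform(0, 30, size=(12, 2))    # 12 objects in a 30x30 deg scene
fix = np.array([15.0, 15.0])
scanpath = []
for _ in range(8):                            # simulate an 8-saccade scanpath
    i = rsdw_next_target(objects, fix, rng)
    scanpath.append(int(i))
    fix = objects[i]
print("fixated objects:", scanpath)
```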
Brains of verbal memory specialists show anatomical differences in language, memory and visual systems
NeuroImage, 2015

We studied a group of verbal memory specialists to determine whether intensive oral text memory is associated with structural features of hippocampal and lateral-temporal regions implicated in language processing. Professional Vedic Sanskrit Pandits in India train from childhood for around 10 years in an ancient, formalized tradition of oral Sanskrit text memorization and recitation, mastering the exact pronunciation and invariant content of multiple 40,000-100,000 word oral texts. We conducted structural analysis of gray matter density, cortical thickness, local gyrification, and white matter structure, relative to matched controls. We found massive gray matter density and cortical thickness increases in Pandit brains in language, memory and visual systems, including i) bilateral lateral temporal cortices and ii) the anterior cingulate cortex and the hippocampus, regions associated with long- and short-term memory. Differences in hippocampal morphometry matched those previously documented for expert spatial navigators and individuals with good verbal working memory. The findings provide unique insight into the brain organization implementing formalized oral knowledge systems.
Predictive remapping of visual features precedes saccadic eye movements
Nature Neuroscience, 2007
https://www.academia.edu/5309663/Predictive_remapping_of_visual_features_precedes_saccadic_eye_movements
The frequent occurrence of saccadic eye movements raises the question of how information is combined across separate glances into a stable, continuous percept. Here I show that visual form processing is altered at both the current fixation position and the location of the saccadic target before the saccade. When human observers prepared to follow a displacement of the stimulus with the eyes, visual form adaptation was transferred from current fixation to the future gaze position. This transfer of adaptation also influenced the perception of test stimuli shown at an intermediate position between fixation and saccadic target. Additionally, I found a presaccadic transfer of adaptation when observers prepared to move their eyes toward a stationary adapting stimulus in peripheral vision. The remapping of visual processing, demonstrated here with form adaptation, may help to explain our impression of a smooth transition, with no temporal delay, of visual perception across glances.

Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49352411,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49352411/thumbnails/1.jpg","file_name":"Predictive_remapping_of_visual_features_20161004-5636-vz31ej.pdf","download_url":"https://www.academia.edu/attachments/49352411/download_file","bulk_download_file_name":"Predictive_remapping_of_visual_features.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49352411/Predictive_remapping_of_visual_features_20161004-5636-vz31ej-libre.pdf?1475607808=\u0026response-content-disposition=attachment%3B+filename%3DPredictive_remapping_of_visual_features.pdf\u0026Expires=1740881335\u0026Signature=NvWWM3rEPT5vHgFr2SThSTvLaW80PsXJK55afRzbAXoqTEIj2oh5o9b05GMfzEOeUyX3j0CZI99Hwu4Fkwx4hEMbHL74FvDFu-hDN8XkV1v3V1YhMEJHLCQTTAiHpY9e0FjzeL42JW2ZObrg6Ioza9leCOX9Efb2grrez0T6dgrX4T4JyluqVUHmNH1w-ZTR7YSRhI2lqIetMvP59mfl5P6DI0GrX46P2-TYWtD2cOcF-U4RJj28j6tF77ldXQzgm3ObwCPA1stp2~mvm4OTH00vqkUnibZtWdfEQTV0uXXR5xmLfK42~xpJ3VWfxKbLTbF0Qjy3HVv2nCsRkZ6~zQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="5309666"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309666/Spatiotopic_temporal_integration_of_visual_motion_across_saccadic_eye_movements"><img alt="Research paper thumbnail of Spatiotopic temporal integration of visual motion across saccadic eye movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49352397/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309666/Spatiotopic_temporal_integration_of_visual_motion_across_saccadic_eye_movements">Spatiotopic temporal integration of visual motion across saccadic eye movements</a></div><div class="wp-workCard_item"><span>Nature Neuroscience</span><span>, 2003</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although seldom aware of this, humans make an average of 3-5 saccadic eye movements per second. A...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although seldom aware of this, humans make an average of 3-5 saccadic eye movements per second. Although this is an efficient strategy to make maximum use of the high resolution of central vision 1 , it raises the question of how information from separate glances is combined to give a stable perception of the world. One potential solution would be to combine visual information across saccades. However, this mechanism could be useful only if information from the same spatiotopic position is integrated. Previous studies of trans-saccadic integration show that visual information is not integrated across saccades, at least in the case of visual patterns 2-4 (but see ref. 5). 
Many simple visual tasks, such as contrast sensitivity and shape discrimination, have a limited integration time falling within the duration of a typical fixation 6 . Thus, the failure to find visual integration in these tasks does not necessarily indicate that spatiotopic integration does not occur; simple patterns might be detected in a single fixation.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="41c3f4ced3dc0b37d3a9e7104158825a" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352397,&quot;asset_id&quot;:5309666,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352397/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309666"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309666"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309666; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309666]").text(description); $(".js-view-count[data-work-id=5309666]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309666; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309666']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "41c3f4ced3dc0b37d3a9e7104158825a" } } $('.js-work-strip[data-work-id=5309666]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":5309666,"title":"Spatiotopic temporal integration of visual motion across saccadic eye 
movements","internal_url":"https://www.academia.edu/5309666/Spatiotopic_temporal_integration_of_visual_motion_across_saccadic_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49352397,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49352397/thumbnails/1.jpg","file_name":"melcher_morronenn.pdf","download_url":"https://www.academia.edu/attachments/49352397/download_file","bulk_download_file_name":"Spatiotopic_temporal_integration_of_visu.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49352397/melcher_morronenn-libre.pdf?1475607813=\u0026response-content-disposition=attachment%3B+filename%3DSpatiotopic_temporal_integration_of_visu.pdf\u0026Expires=1740881335\u0026Signature=YqfFCAUMXQqp2akjtlcFvx3djs58uWkWMNLNKTBWU6~gXQv7BuJ3b37xyRHySh6U9r60vny21vg9moyb-JagJnOkZgs0fAhwQ3AztoS~vsQ6IFY7bsnXXttuKoo1g6xYISEd~ZtZexhx8DJ55ks8fdtSeH8KH6qZWIMLo2uoDepETAggSiJdnWGYuiGYqUmHt-px4cMrte0IN3iawabMajM-baysIwYi3l2xxJ2A~1umkNkpHyukC-Z2G0n2DF4Nbsi3vvds0ym-jg6aWoJV87Hb1MiwGoasQP3GazFJ4e3E~yuJs0Df0wAJX3LX7W-KlGNXQjBcscSEIBJ21rs24Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058736"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058736/Subitizing_reflects_visuo_spatial_object_individuation_capacity"><img alt="Research paper thumbnail of Subitizing reflects visuo-spatial object individuation capacity" class="work-thumbnail" src="https://attachments.academia-assets.com/49512560/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058736/Subitizing_reflects_visuo_spatial_object_individuation_capacity">Subitizing reflects visuo-spatial object individuation capacity</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/ManuelaPiazza1">Manuela Piazza</a></span></div><div class="wp-workCard_item"><span>Cognition</span><span>, 2011</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Subitizing is the immediate apprehension of the exact number of items in small sets. Despite more...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Subitizing is the immediate apprehension of the exact number of items in small sets. Despite more than a 100 years of research around this phenomenon, its nature and origin are still unknown. 
One view posits that it reflects a number estimation process common for small and large sets, which precision decreases as the number of items increases, according to Weber&#39;s law. Another view proposes that it reflects a non-numerical mechanism of visual indexing of multiple objects in parallel that is limited in capacity. In a previous research we have gathered evidence against the Weberian estimation hypothesis. Here we provide first direct evidence for the alternative object indexing hypothesis, and show that subitizing reflects a domain general mechanism shared with other tasks that require multiple object individuation.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="8ba0781b8c17a14ae10ba7ed75d440f3" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512560,&quot;asset_id&quot;:29058736,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512560/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058736"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058736"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058736; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058736]").text(description); $(".js-view-count[data-work-id=29058736]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058736; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058736']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "8ba0781b8c17a14ae10ba7ed75d440f3" } } $('.js-work-strip[data-work-id=29058736]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058736,"title":"Subitizing reflects visuo-spatial object individuation 
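For context on the Weberian estimation account contrasted in this abstract, a minimal formulation from standard psychophysics (not taken from the paper itself): under Weber's law, the noise of the internal numerosity estimate grows in proportion to the numerosity N, with a constant Weber fraction w,

    \sigma(\hat{N}) = w N, \qquad \frac{\Delta N}{N} = w,

so estimation precision degrades smoothly and continuously as N increases. The object-indexing account supported by the paper instead predicts essentially exact performance up to a small fixed capacity (typically three to four items) and a qualitative drop beyond it.
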
capacity","internal_url":"https://www.academia.edu/29058736/Subitizing_reflects_visuo_spatial_object_individuation_capacity","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512560,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512560/thumbnails/1.jpg","file_name":"Subitizing_reflects_visuo-spatial_object20161010-18908-oszwv7.pdf","download_url":"https://www.academia.edu/attachments/49512560/download_file","bulk_download_file_name":"Subitizing_reflects_visuo_spatial_object.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512560/Subitizing_reflects_visuo-spatial_object20161010-18908-oszwv7-libre.pdf?1476136405=\u0026response-content-disposition=attachment%3B+filename%3DSubitizing_reflects_visuo_spatial_object.pdf\u0026Expires=1740881335\u0026Signature=GasNJxm0YjlMT-vXr7c8wOM3yfSkF0DCe~6YSYfCBHWmQbBnXKurAQ44dMeC-2elKwNPXLjqq00HV9w1T5V417pk1xs4Xc~PxSzEUDXJIJhXFRBpocrN4sQJOYbDNFNxO4XWDmgnIWnT6-zhWNzZMvqVPvTkaUirYBqcMlnHYoCJBgls5fZmuwr5FMrqLkx2-V9k72kJ6QSMuzS69k~k1lg3BJm8lKArHOL~23yHqGknFejhcMfl9F8ynWoVQ836g8SpdDBrmcZ7~px0seUbKgiEBwikqyjvW-IQ9JMhbYq2uHmJ4mcf~hpsZJEoAx6diqkfvgqWN3sk4mQ2BTYNOw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058738"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058738/Spatiotopic_Transfer_of_Visual_Form_Adaptation_across_Saccadic_Eye_Movements"><img alt="Research paper thumbnail of Spatiotopic Transfer of Visual-Form Adaptation across Saccadic Eye Movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49512563/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058738/Spatiotopic_Transfer_of_Visual_Form_Adaptation_across_Saccadic_Eye_Movements">Spatiotopic Transfer of Visual-Form Adaptation across Saccadic Eye Movements</a></div><div class="wp-workCard_item"><span>Current Biology</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although conscious perception is smooth and continuous, the input to the visual system is a serie...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although conscious perception is smooth and continuous, the input to the visual system is a series of short, discrete fixations interleaved with rapid shifts of the eye. 
One possible explanation for visual stability is that internal maps of objects and their visual properties are remapped around the time of saccades, but numerous studies have demonstrated that visual patterns are not combined across saccades. Here, we report that visual-form aftereffects transfer across separate fixations when adaptor and test are presented in the same spatial position. The magnitude of the transsaccadic adaptation increased with stimulus complexity, suggesting a progressive construction of spatiotopic receptive fields along the visual-form pathway. These results demonstrate that basic shape information is combined across saccades, allowing for predictive and consistent information from the past to be incorporated into each new fixation.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="c1baf5e1b451073010dccc47b10c813d" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512563,&quot;asset_id&quot;:29058738,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512563/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058738"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058738"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058738; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058738]").text(description); $(".js-view-count[data-work-id=29058738]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058738; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058738']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "c1baf5e1b451073010dccc47b10c813d" } } $('.js-work-strip[data-work-id=29058738]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: 
{"id":29058738,"title":"Spatiotopic Transfer of Visual-Form Adaptation across Saccadic Eye Movements","internal_url":"https://www.academia.edu/29058738/Spatiotopic_Transfer_of_Visual_Form_Adaptation_across_Saccadic_Eye_Movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512563,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512563/thumbnails/1.jpg","file_name":"melcher2005.pdf","download_url":"https://www.academia.edu/attachments/49512563/download_file","bulk_download_file_name":"Spatiotopic_Transfer_of_Visual_Form_Adap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512563/melcher2005-libre.pdf?1476136402=\u0026response-content-disposition=attachment%3B+filename%3DSpatiotopic_Transfer_of_Visual_Form_Adap.pdf\u0026Expires=1740881335\u0026Signature=Dt3wMBmNoCwrzf21RABQwLdnFtPD74GBqjhZr2ZKOtGsfDXD-1Qz9YyoSozyR1CwdQGQVzFRirW4TpsPTwa2WrJOpTmMIU9-h4UY~Oo3Sq0h28~xILBRJj2cIxKvHdun1nRPBBVj8tX3O6ppSroktohzcDwixOItu3mZFlcig6~jbijJ812unHFR5KJdOPDVXZ~YU9IH1Gw0eY7fZ7tq0F9MZd4J3pfs0i8gIDq1IFytObxt61pJYA1fsHES5n74VxlhDRMsA2zycd5pWdsIxY8booBFwCn8O1MrDBWO~nq8t5MupRU7kBJOW2PFzYqrE3vJntIH4S-FDNacEJlGUA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058757"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058757/Temporal_Windows_in_Visual_Processing_Prestimulus_Brain_State_and_Poststimulus_Phase_Reset_Segregate_Visual_Transients_on_Different_Temporal_Scales"><img alt="Research paper thumbnail of Temporal Windows in Visual Processing: &quot;Prestimulus Brain State&quot; and &quot;Poststimulus Phase Reset&quot; Segregate Visual Transients on Different Temporal Scales" class="work-thumbnail" src="https://attachments.academia-assets.com/49512568/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058757/Temporal_Windows_in_Visual_Processing_Prestimulus_Brain_State_and_Poststimulus_Phase_Reset_Segregate_Visual_Transients_on_Different_Temporal_Scales">Temporal Windows in Visual Processing: &quot;Prestimulus Brain State&quot; and &quot;Poststimulus Phase Reset&quot; Segregate Visual Transients on Different Temporal Scales</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/ChristophBraun">Christoph Braun</a></span></div><div class="wp-workCard_item"><span>Journal of Neuroscience</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Dynamic vision requires both stability of the current perceptual representation and sensitivity 
t...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Dynamic vision requires both stability of the current perceptual representation and sensitivity to the accumulation of sensory evidence over time. Here we study the electrophysiological signatures of this intricate balance between temporal segregation and integration in vision. Within a forward masking paradigm with short and long stimulus onset asynchronies (SOA), we manipulated the temporal overlap of the visual persistence of two successive transients. Human observers enumerated the items presented in the second target display as a measure of the informational capacity read-out from this partly temporally integrated visual percept. We observed higher ␤-power immediately before mask display onset in incorrect trials, in which enumeration failed due to stronger integration of mask and target visual information. This effect was timescale specific, distinguishing between segregation and integration of visual transients that were distant in time (long SOA). Conversely, for short SOA trials, mask onset evoked a stronger visual response when mask and targets were correctly segregated in time. Examination of the target-related response profile revealed the importance of an evoked ␣-phase reset for the segregation of those rapid visual transients. Investigating this precise mapping of the temporal relationships of visual signals onto electrophysiological responses highlights how the stream of visual information is carved up into discrete temporal windows that mediate between segregated and integrated percepts. 
Fragmenting the stream of visual information provides a means to stabilize perceptual events within one instant in time.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="2d5b8a8cda22bc5b720bd489197bb8dc" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512568,&quot;asset_id&quot;:29058757,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512568/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058757"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058757"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058757; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058757]").text(description); $(".js-view-count[data-work-id=29058757]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058757; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058757']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "2d5b8a8cda22bc5b720bd489197bb8dc" } } $('.js-work-strip[data-work-id=29058757]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058757,"title":"Temporal Windows in Visual Processing: \"Prestimulus Brain State\" and \"Poststimulus Phase Reset\" Segregate Visual Transients on Different Temporal Scales","internal_url":"https://www.academia.edu/29058757/Temporal_Windows_in_Visual_Processing_Prestimulus_Brain_State_and_Poststimulus_Phase_Reset_Segregate_Visual_Transients_on_Different_Temporal_Scales","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512568,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512568/thumbnails/1.jpg","file_name":"1554.full.pdf","download_url":"https://www.academia.edu/attachments/49512568/download_file","bulk_download_file_name":"Temporal_Windows_in_Visual_Processing_Pr.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512568/1554.full-libre.pdf?1476136401=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_Windows_in_Visual_Processing_Pr.pdf\u0026Expires=1740065947\u0026Signature=TfVREfODw6EhkP1lj49ao2nN5T9zbNgRgZveLraUYj8BItiujddCv27APEYSPrCIWFf0e7XPPLyaaIYGsERdqNSU-mwDCCH0TWEYhSM-OhSNnXqK0DTRrC-hIhtjsNEfjpmF5kFlPdhq6hZHuxzYx7WBMXaoXTqExeq41pl9PymeX~WSfdCmHyfeBDnA4gKSFYEjPseBBqz6iVMu~mRKPUQe809jwUMd-99f7CLqWdYXfjY3aFh9MDJ-yE5XK-VZyR0fDYdeOiBrKgvTt~IkimSjYnq7bgXwy2je3A1nq5CxQiMkRot8OQS1HtmbJO6q3zocSF3n1TZmp7oed2r8VQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058759"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058759/Trans_saccadic_perception"><img alt="Research paper thumbnail of Trans-saccadic perception" class="work-thumbnail" src="https://attachments.academia-assets.com/49512571/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058759/Trans_saccadic_perception">Trans-saccadic perception</a></div><div class="wp-workCard_item"><span>Trends in Cognitive Sciences</span><span>, 2008</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6fa24278973050c5b848c7e842c257f6" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512571,&quot;asset_id&quot;:29058759,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512571/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058759"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058759"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058759; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058759]").text(description); $(".js-view-count[data-work-id=29058759]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x 
work-percentile"></span></span><script>$(function () { var workId = 29058759; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058759']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6fa24278973050c5b848c7e842c257f6" } } $('.js-work-strip[data-work-id=29058759]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058759,"title":"Trans-saccadic perception","internal_url":"https://www.academia.edu/29058759/Trans_saccadic_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512571,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512571/thumbnails/1.jpg","file_name":"David_Melcher_1_and_Carol_L._Colby20161010-18905-bdjtky.pdf","download_url":"https://www.academia.edu/attachments/49512571/download_file","bulk_download_file_name":"Trans_saccadic_perception.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512571/David_Melcher_1_and_Carol_L._Colby20161010-18905-bdjtky-libre.pdf?1476136396=\u0026response-content-disposition=attachment%3B+filename%3DTrans_saccadic_perception.pdf\u0026Expires=1740881335\u0026Signature=M-rLMSbXJaoz75fJjXbPEFrepwxYZU0EYZfIUquo6I8T~XqhMjFHk2PqkDdXCKTBTEypRmHlyvtZRWluXPnmVlI2OI-8xpL9PvT6ZptRcK8gib920JSQbb10VVRjsj~z8Sj7d1Vv3HulSJtmV57YXnpjw6nu-v3n0SX6GF0IBLaUPsLUmtVlxVBdXYe8S8Fw1UqdKp8AmUwfFoMHf1LDMgzLH~PW9xb0pgFZenO5C2S25teFvTaSsC-I2V-R-Ra-nGehpT~7ijqeSP~2M969ETdbJ4jWJNXq0b85AHPXwCzGSJe4~ZP~OmURzBQY7TvSvCSPtDAJFs2UDR8bAoLEXw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058767"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058767/Persistence_of_visual_memory_for_scenes"><img alt="Research paper thumbnail of Persistence of visual memory for scenes" class="work-thumbnail" src="https://attachments.academia-assets.com/49512587/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" 
href="https://www.academia.edu/29058767/Persistence_of_visual_memory_for_scenes">Persistence of visual memory for scenes</a></div><div class="wp-workCard_item"><span>Nature</span><span>, 2001</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="9224c675b9816e17f77764d86933d1d8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512587,&quot;asset_id&quot;:29058767,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512587/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058767"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058767"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058767; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058767]").text(description); $(".js-view-count[data-work-id=29058767]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058767; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058767']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "9224c675b9816e17f77764d86933d1d8" } } $('.js-work-strip[data-work-id=29058767]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058767,"title":"Persistence of visual memory for scenes","internal_url":"https://www.academia.edu/29058767/Persistence_of_visual_memory_for_scenes","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512587,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512587/thumbnails/1.jpg","file_name":"Melcher_D._Persistence_of_visual_memory_20161010-2393-xyv975.pdf","download_url":"https://www.academia.edu/attachments/49512587/download_file","bulk_download_file_name":"Persistence_of_visual_memory_for_scenes.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512587/Melcher_D._Persistence_of_visual_memory_20161010-2393-xyv975-libre.pdf?1476136376=\u0026response-content-disposition=attachment%3B+filename%3DPersistence_of_visual_memory_for_scenes.pdf\u0026Expires=1740881335\u0026Signature=cofW44RKPwWlD4AvlIfrOhk5TOls4kNoMcXmPJ4uKKFHb1oiQwF0Ix4FnKRqqoivoqYYm7hmfDGmR0v3KLJUmC9~zIUi3DavcQBuTLdidj8TMWxKZ0gRAgLyaVQjpJ9DzThNJelysLAldoLEoRpUe05qTCCcwAgL~AqtlWtBWm2hTCBzqVO-1SxH32brGdRouoGZ3u~K2etpA7ub3krscwB5L7Qk6~FoLwJWmYxiPvf6aLhfxqlGSoXXQuxCyPMftZewiakr2ByfDlJVyWlHVEy0nhSZh~KswFSEZz6cRF2dnY~qu4Ulktp-JXcIjt20Lebnew2aNFTFwZmkWpwGFA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="8625238" id="recentpublications"></div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="8625250" id="temporalaspectsofperception"><div class="js-work-strip profile--work_container" data-work-id="5309667"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309667/The_role_of_attention_in_central_and_peripheral_motion_integration"><img alt="Research paper thumbnail of The role of attention in central and peripheral motion integration" class="work-thumbnail" src="https://attachments.academia-assets.com/49352401/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309667/The_role_of_attention_in_central_and_peripheral_motion_integration">The role of attention in central and peripheral motion integration</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://york.academia.edu/AurelioBruno">Aurelio Bruno</a></span></div><div class="wp-workCard_item"><span>Vision Research</span><span>, 2004</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Attention has been shown to modulate visual processing in a wide variety of tasks. We tested the ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Attention has been shown to modulate visual processing in a wide variety of tasks. We tested the influence of attention on the temporal integration of motion for both central and peripherally viewed targets (6°· 6°). 
Consistent with previous results, motion sensitivity for a brief motion signal (70-3500 ms) embedded in noise (10 s) increased as a function of motion duration up to a critical duration of about 1.5 s. Summation times for centrally and peripherally viewed targets were similar. An effect of eccentricity was found, however, in a double-motion task, in which two brief (150 ms) motion signals were presented with varying delays (0-7 s) of random noise between the two signals. Specifically, the maximum delay between the two signals that still supported temporal summation (summation constant) was about three times longer for centrally viewed targets (3.5-4.5 s versus 1.5-2 s). We investigated the role of spatial attention in the double-motion task by adding a concurrent color contrast discrimination task. The addition of the concurrent task dramatically reduced differences in the summation constant for central and peripheral targets, without reducing overall motion sensitivity. Thus, attention appears to specifically modulate temporal summation, suggesting that the long integration times found for motion coherence are mediated by attention.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6d775e31a6cd5eb64d43b8f314af6248" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352401,&quot;asset_id&quot;:5309667,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352401/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309667"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309667"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309667; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309667]").text(description); $(".js-view-count[data-work-id=5309667]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309667; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309667']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var 
Temporal buffering and visual capacity: The time course of object formation underlies capacity limits in visual cognition. Attention, Perception, & Psychophysics, 2013.

Capacity limits are a hallmark of visual cognition. The upper boundary of our ability to individuate and remember objects is well known but, despite its central role in visual information processing, not well understood. Here, we investigated the role of temporal limits in the perceptual processes of forming "object files." Specifically, we examined the two fundamental mechanisms of object file formation, individuation and identification, by selectively interfering with visual processing using forward and backward masking with variable stimulus onset asynchronies. While target detection was almost unaffected by either type of masking, the two types had distinct effects on the two stages of object formation. Forward "integration" masking selectively impaired object individuation, whereas backward "interruption" masking affected only identification and the consolidation of information into visual working memory. We therefore conclude that the inherent temporal dynamics of visual information processing are an essential component in creating the capacity limits in object individuation and visual working memory.
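The masking manipulation reduces to which side of the target the mask occupies and how large the stimulus onset asynchrony (SOA) is. A tiny sketch of that trial logic, with hypothetical event names and timings (not taken from the paper):

```python
# Illustrative only: forward vs. backward masking trial timelines.
def masking_trial(kind, soa_ms, dur_ms=30.0):
    """Return (event, onset_ms, duration_ms) triples for one trial."""
    if kind == "forward":   # "integration" masking: mask precedes the target
        return [("mask", 0.0, dur_ms), ("target", soa_ms, dur_ms)]
    if kind == "backward":  # "interruption" masking: mask follows the target
        return [("target", 0.0, dur_ms), ("mask", soa_ms, dur_ms)]
    raise ValueError(f"unknown mask type: {kind!r}")

for soa in (30, 60, 120):   # variable SOAs, in milliseconds
    print(masking_trial("forward", soa))
    print(masking_trial("backward", soa))
```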
id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "f008eec55e2f17811e805b6e77494f50" } } $('.js-work-strip[data-work-id=29058734]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058734,"title":"Temporal buffering and visual capacity: The time course of object formation underlies capacity limits in visual cognition","internal_url":"https://www.academia.edu/29058734/Temporal_buffering_and_visual_capacity_The_time_course_of_object_formation_underlies_capacity_limits_in_visual_cognition","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512559,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512559/thumbnails/1.jpg","file_name":"Temporal_buffering_and_visual_capacity_T20161010-27087-j1rev4.pdf","download_url":"https://www.academia.edu/attachments/49512559/download_file","bulk_download_file_name":"Temporal_buffering_and_visual_capacity_T.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512559/Temporal_buffering_and_visual_capacity_T20161010-27087-j1rev4-libre.pdf?1476136411=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_buffering_and_visual_capacity_T.pdf\u0026Expires=1740881335\u0026Signature=aI2zIxhWIlvR~g4A-KCJgxXfj1vF4yoxESs-v6EH1vGmnAWkZb6nCWOW6-IFpnY4i4JzBWp-BQX~9aJUU1vl1fbGt3tqu6ohmjMPDp4z6H8o-awp-MwGQXUHUa8wpH9pBOTfwEK6TwfcgQOoYyYcG3aQu5cByHAtMWX5dLr~-JdwWrWw~ttTgIkIA21zcD-quaaDnG7-r2O8HpRL56hNmNZwoj5vAVyrlsrhQoapXf2PFOQnWqXsROqChXWOdJJhf7NBmMJBNCRhKfc1bp5SKWiHuSdItRkvo~BTfUhxZPuQNbRa~94EvxzandKlPp3FsBe23WKGucZrgQxm-Aw9iA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058748"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058748/Electrophysiological_signatures_of_temporal_segregation_and_integration_of_visual_information_an_MEG_study"><img alt="Research paper thumbnail of Electrophysiological signatures of temporal segregation and integration of visual information - an MEG study" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" 
href="https://www.academia.edu/29058748/Electrophysiological_signatures_of_temporal_segregation_and_integration_of_visual_information_an_MEG_study">Electrophysiological signatures of temporal segregation and integration of visual information - an MEG study</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2013</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058748"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058748"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058748; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058748]").text(description); $(".js-view-count[data-work-id=29058748]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058748; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058748']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058748]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058748,"title":"Electrophysiological signatures of temporal segregation and integration of visual information - an MEG study","internal_url":"https://www.academia.edu/29058748/Electrophysiological_signatures_of_temporal_segregation_and_integration_of_visual_information_an_MEG_study","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058762"><div class="profile--work_thumbnail hidden-xs"><a 
class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058762/Rapid_enumeration_within_a_fraction_of_a_single_glance_The_role_of_visible_persistence_in_object_individuation_capacity"><img alt="Research paper thumbnail of Rapid enumeration within a fraction of a single glance: The role of visible persistence in object individuation capacity" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058762/Rapid_enumeration_within_a_fraction_of_a_single_glance_The_role_of_visible_persistence_in_object_individuation_capacity">Rapid enumeration within a fraction of a single glance: The role of visible persistence in object individuation capacity</a></div><div class="wp-workCard_item"><span>Visual Cognition</span><span>, 2012</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The number of items that can be individuated at a single glance is limited. Here, we investigate ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The number of items that can be individuated at a single glance is limited. Here, we investigate object individuation at a higher temporal resolution, in fractions of a single glance. In two experiments involving object individuation we manipulated the duration of visual persistence of the target items with a forward masking procedure. 
The number of items as well as their</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058762"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058762"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058762; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058762]").text(description); $(".js-view-count[data-work-id=29058762]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058762; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058762']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058762]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058762,"title":"Rapid enumeration within a fraction of a single glance: The role of visible persistence in object individuation capacity","internal_url":"https://www.academia.edu/29058762/Rapid_enumeration_within_a_fraction_of_a_single_glance_The_role_of_visible_persistence_in_object_individuation_capacity","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058763"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058763/Temporal_Integration_Windows_for_Naturalistic_Visual_Sequences"><img alt="Research paper thumbnail of Temporal Integration Windows for Naturalistic Visual 
Sequences" class="work-thumbnail" src="https://attachments.academia-assets.com/49512576/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058763/Temporal_Integration_Windows_for_Naturalistic_Visual_Sequences">Temporal Integration Windows for Naturalistic Visual Sequences</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://iwww-mpg.academia.edu/angelaalbi">angela albi</a></span></div><div class="wp-workCard_item"><span>PLoS ONE</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">There is increasing evidence that the brain possesses mechanisms to integrate incoming sensory in...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">There is increasing evidence that the brain possesses mechanisms to integrate incoming sensory information as it unfolds over time-periods of 2-3 seconds. The ubiquity of this mechanism across modalities, tasks, perception and production has led to the proposal that it may underlie our experience of the subjective present. A critical test of this claim is that this phenomenon should be apparent in naturalistic visual experiences. We tested this using movie-clips as a surrogate for our day-to-day experience, temporally scrambling them to require (re-) integration within and beyond the hypothesized 2-3 second interval. Two independent experiments demonstrate a step-wise increase in the difficulty to follow stimuli at the hypothesized 2-3 second scrambling condition. Moreover, only this difference could not be accounted for by low-level visual properties. 
This provides the first evidence that this 2-3 second integration window extends to complex, naturalistic visual sequences more consistent with our experience of the subjective present.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="f543170f9ef29084aa2e577859b95fbb" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512576,&quot;asset_id&quot;:29058763,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512576/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058763"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058763"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058763; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058763]").text(description); $(".js-view-count[data-work-id=29058763]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058763; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058763']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "f543170f9ef29084aa2e577859b95fbb" } } $('.js-work-strip[data-work-id=29058763]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058763,"title":"Temporal Integration Windows for Naturalistic Visual Sequences","internal_url":"https://www.academia.edu/29058763/Temporal_Integration_Windows_for_Naturalistic_Visual_Sequences","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
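The temporal-scrambling manipulation can be pictured as cutting a clip into fixed-length segments and shuffling them, so that (re-)integration is required across segment boundaries. A minimal sketch under that reading, assuming an abstract frame list; segment lengths and frame rates are illustrative, not the study's stimuli:

```python
# Illustrative only: segment-wise temporal scrambling of a frame sequence.
import random

def scramble(frames, fps, segment_s, seed=0):
    """Shuffle a frame sequence in segments of segment_s seconds each."""
    seg_len = max(1, round(segment_s * fps))
    segments = [frames[i:i + seg_len] for i in range(0, len(frames), seg_len)]
    random.Random(seed).shuffle(segments)
    return [f for seg in segments for f in seg]

frames = list(range(240))                         # e.g. 10 s of video at 24 fps
within = scramble(frames, fps=24, segment_s=1.0)  # scrambling within the ~2-3 s window
beyond = scramble(frames, fps=24, segment_s=4.0)  # scrambling beyond the window
```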
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512576,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512576/thumbnails/1.jpg","file_name":"54e1fd2e0cf296663793cbbd.pdf","download_url":"https://www.academia.edu/attachments/49512576/download_file","bulk_download_file_name":"Temporal_Integration_Windows_for_Natural.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512576/54e1fd2e0cf296663793cbbd-libre.pdf?1476136390=\u0026response-content-disposition=attachment%3B+filename%3DTemporal_Integration_Windows_for_Natural.pdf\u0026Expires=1740881335\u0026Signature=HNb9nCcEp1tX79AaxviPMTK9sUyfSw0QrsRNi~0LpXn4Corq3aifmqXtVHyF9ORHJz~wLLTar9W4bTbFtO3CnoSWiKoEcOQ6fdW5kRZBW2g8XZ4jfcZgDNfzuLnI6WxHPY12lYh4mGegzo3uhwrFwqksuPXcD-EEYQOX6l0bMxCSTlnE0SM1~bfppTHyBldRnHbYAN0RKS9KQ~y3EvkOFHbBUCaqdhT-I53Hu4JhEoJN1LZvFUv~z2jbbIojdFptITQ~dWyOsM01qQKNACSY1AV-ijxePuT1JN6z7iopFqYKvHW~maxYwRLkStP03GLhnjwzzFSbDOrqZEEiJe239w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058774"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058774/Dissociation_between_spatial_and_temporal_integration_mechanisms_in_Vernier_fusion"><img alt="Research paper thumbnail of Dissociation between spatial and temporal integration mechanisms in Vernier fusion" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058774/Dissociation_between_spatial_and_temporal_integration_mechanisms_in_Vernier_fusion">Dissociation between spatial and temporal integration mechanisms in Vernier fusion</a></div><div class="wp-workCard_item"><span>Vision research</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The visual system constructs a percept of the world across multiple spatial and temporal scales. ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The visual system constructs a percept of the world across multiple spatial and temporal scales. This raises the questions of whether different scales involve separate integration mechanisms and whether spatial and temporal factors are linked via spatio-temporal reference frames. We investigated this using Vernier fusion, a phenomenon in which the features of two Vernier stimuli presented in close spatio-temporal proximity are fused into a single percept. With increasing spatial offset, perception changes dramatically from a single percept into apparent motion and later, at larger offsets, into two separately perceived stimuli. We tested the link between spatial and temporal integration by presenting two successive Vernier stimuli presented at varying spatial and temporal offsets. 
The second Vernier either had the same or the opposite offset as the first. We found that the type of percept depended not only on spatial offset, as reported previously, but interacted with the temporal p...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058774"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058774"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058774; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058774]").text(description); $(".js-view-count[data-work-id=29058774]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058774; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058774']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058774]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058774,"title":"Dissociation between spatial and temporal integration mechanisms in Vernier fusion","internal_url":"https://www.academia.edu/29058774/Dissociation_between_spatial_and_temporal_integration_mechanisms_in_Vernier_fusion","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058780"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" 
href="https://www.academia.edu/29058780/Expansion_and_Compression_of_Time_Correlate_with_Information_Processing_in_an_Enumeration_Task"><img alt="Research paper thumbnail of Expansion and Compression of Time Correlate with Information Processing in an Enumeration Task" class="work-thumbnail" src="https://attachments.academia-assets.com/49512575/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058780/Expansion_and_Compression_of_Time_Correlate_with_Information_Processing_in_an_Enumeration_Task">Expansion and Compression of Time Correlate with Information Processing in an Enumeration Task</a></div><div class="wp-workCard_item"><span>PLOS ONE</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Perception of temporal duration is subjective and is influenced by factors such as attention and ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Perception of temporal duration is subjective and is influenced by factors such as attention and context. For example, unexpected or emotional events are often experienced as if time subjectively expands, suggesting that the amount of information processed in a unit of time can be increased. Time dilation effects have been measured with an oddball paradigm in which an infrequent stimulus is perceived to last longer than standard stimuli in the rest of the sequence. Likewise, time compression for the oddball occurs when the duration of the standard items is relatively brief. Here, we investigated whether the amount of information processing changes when time is perceived as distorted. On each trial, an oddball stimulus of varying numerosity (1-14 items) and duration was presented along with standard items that were either short (70 ms) or long (1050 ms). Observers were instructed to count the number of dots within the oddball stimulus and to judge its relative duration with respect to the standards on that trial. Consistent with previous results, oddballs were reliably perceived as temporally distorted: expanded for longer standard stimuli blocks and compressed for shorter standards. The occurrence of these distortions of time perception correlated with perceptual processing; i.e. enumeration accuracy increased when time was perceived as expanded and decreased with temporal compression. These results suggest that subjective time distortions are not epiphenomenal, but reflect real changes in sensory processing. 
Such short-term plasticity in information processing rate could be evolutionarily advantageous in optimizing perception and action during critical moments.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="1d50639fb5ee1e74d6a59f44e15d718b" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512575,&quot;asset_id&quot;:29058780,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512575/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058780"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058780"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058780; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058780]").text(description); $(".js-view-count[data-work-id=29058780]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058780; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058780']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "1d50639fb5ee1e74d6a59f44e15d718b" } } $('.js-work-strip[data-work-id=29058780]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058780,"title":"Expansion and Compression of Time Correlate with Information Processing in an Enumeration Task","internal_url":"https://www.academia.edu/29058780/Expansion_and_Compression_of_Time_Correlate_with_Information_Processing_in_an_Enumeration_Task","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
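For concreteness, the oddball paradigm described above can be sketched as a stream of fixed-duration standards containing one oddball whose duration and numerosity vary. A hypothetical trial generator; everything beyond the quoted 70/1050 ms standards and the 1-14 item range is an illustrative assumption:

```python
# Illustrative only: generating one oddball-paradigm trial.
import random

def oddball_trial(block, n_standards=6, seed=0):
    """One trial: a run of standards plus one duration/numerosity oddball."""
    rng = random.Random(seed)
    standard_ms = 70 if block == "short" else 1050   # durations quoted in the abstract
    events = [("standard", standard_ms, None)] * n_standards
    oddball_ms = round(standard_ms * rng.uniform(0.5, 1.5))  # illustrative range
    n_dots = rng.randint(1, 14)                              # numerosity, 1-14 items
    events.insert(rng.randrange(1, n_standards), ("oddball", oddball_ms, n_dots))
    return events

print(oddball_trial("short"))
print(oddball_trial("long"))
```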
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512575,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512575/thumbnails/1.jpg","file_name":"55f0034708ae199d47c03b4c.pdf","download_url":"https://www.academia.edu/attachments/49512575/download_file","bulk_download_file_name":"Expansion_and_Compression_of_Time_Correl.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512575/55f0034708ae199d47c03b4c-libre.pdf?1476136394=\u0026response-content-disposition=attachment%3B+filename%3DExpansion_and_Compression_of_Time_Correl.pdf\u0026Expires=1740881335\u0026Signature=Wy9Ill72Cf7VUTgaAqX4rP2c1fIzihRLM6~G0HhqrHlgFnB-adUaOrra4zulQMpqao99oLGVcTh1E7Aa8mdWr-Ar15tySl9hp08xHvkYA-KmPvQBldHwnfF2wUFIG-GWn6rUAiuW1dvYQNDalD-MogQaI~jxUN7rwnJ8uSVhlv9pxPb78ewuXuWA~Z~ScOK9JKH95p2vxY0jHfE8N8qAUpB7EUQwxD2aBebbdxXtSBv0VwR5ITAN67M7JySGlFWXmfdUhWQj7IKFKlo3B5Mg3XzGZ6aHQgrxQ6gm-eCxdYg8tVpIcdAtLnf~eyjM83vH041STaXFoB0upcNrGaVLlQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058783"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058783/Continuous_flash_suppression_effectiveness_depends_on_mask_temporal_frequency"><img alt="Research paper thumbnail of Continuous flash suppression effectiveness depends on mask temporal frequency" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058783/Continuous_flash_suppression_effectiveness_depends_on_mask_temporal_frequency">Continuous flash suppression effectiveness depends on mask temporal frequency</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">A variant of binocular rivalry called Continuous Flash Suppression (CFS) (Tsuchiya &amp;amp;amp;amp;a...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">A variant of binocular rivalry called Continuous Flash Suppression (CFS) (Tsuchiya &amp;amp;amp;amp;amp;amp;amp;amp;amp; Koch, 2005) has become a popular tool for investigating visual processing outside of conscious awareness (Yang, Brascamp, Kang, &amp;amp;amp;amp;amp;amp;amp;amp;amp; Blake, 2014). In a CFS paradigm, a series of different Mondrian patterns is flashed to one eye at a steady rate, suppressing awareness of the image presented to the other eye (Tsuchiya, Koch, Gilroy, &amp;amp;amp;amp;amp;amp;amp;amp;amp; Blake, 2006). In most studies using CFS the temporal frequency for the mask images is set to 10 (Tsuchiya &amp;amp;amp;amp;amp;amp;amp;amp;amp; Koch, 2005; Tsuchiya et al., 2006) or 20 (Jiang et al., 2009) Hz. 
To date, little is known about the precise relationship between masking effectiveness and temporal masking frequency. Given the role of temporal factors in many theories of visual awareness, such as phase coupling of neural oscillations across brain regions or re-entrant processing, we investigated the suppression effectiveness of a wide range of masking frequencies (0-32Hz). In a breakthrough CFS paradigm, participants reported whether an image (a face or house) was presented on each trial while Mondrian-like textures were presented as masks. In condition 1, trials with all different frequencies occurred in random order while in condition 2, trials with the same masking frequency were grouped in blocks.   We found that the response times differed dramatically between temporal masking frequencies, with mask effectiveness following a log-normal curve peaking around 6Hz in both conditions. The static mask (0 Hz: traditional binocular rivalry) yielded similar breakthrough times as higher frequencies of CFS. In practical terms, these results show that, the 10 Hz/20 Hz frequencies used in most CFS studies may not be optimally effective. More generally, these findings support the idea that temporal factors play a critical role in perceptual awareness. Meeting abstract presented at VSS 2015.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058783"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058783"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058783; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058783]").text(description); $(".js-view-count[data-work-id=29058783]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058783; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058783']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058783]').each(function() { if 
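The reported shape of the effect, mask effectiveness following a log-normal curve over frequency with a peak near 6 Hz, can be illustrated by fitting a log-normal-shaped function to breakthrough-time data. A sketch with synthetic data (only the qualitative shape follows the abstract); the 0 Hz static condition is excluded since a log-normal is defined only for positive frequencies:

```python
# Illustrative only: log-normal-shaped fit of mask effectiveness vs. frequency.
import numpy as np
from scipy.optimize import curve_fit

def lognormal_curve(f, amp, mu, sigma):
    # Peaked, log-normal-shaped curve over frequency; maximum at f = exp(mu).
    return amp * np.exp(-(np.log(f) - mu) ** 2 / (2 * sigma ** 2))

freqs = np.array([1, 2, 4, 6, 8, 12, 16, 24, 32], dtype=float)  # mask rates, Hz
rt = lognormal_curve(freqs, 3.0, np.log(6.0), 0.7)              # synthetic "data"
rt += np.random.default_rng(1).normal(0, 0.05, freqs.size)

(amp, mu, sigma), _ = curve_fit(lognormal_curve, freqs, rt, p0=[1.0, np.log(5.0), 1.0])
print(f"fitted peak frequency: {np.exp(mu):.1f} Hz")
```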
class="work-thumbnail" src="https://attachments.academia-assets.com/49512642/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058808/The_temporal_window_of_individuation_limits_visual_capacity">The temporal window of individuation limits visual capacity</a></div><div class="wp-workCard_item"><span>Frontiers in Psychology</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">One of the main tasks of vision is to individuate and recognize specific objects. Unlike the dete...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">One of the main tasks of vision is to individuate and recognize specific objects. Unlike the detection of basic features, object individuation is strictly limited in capacity. Previous studies of capacity, in terms of subitizing ranges or visual working memory, have emphasized spatial limits in the number of objects that can be apprehended simultaneously. Here, we present psychophysical and electrophysiological evidence that capacity limits depend instead on time. Contrary to what is commonly assumed, subitizing, the reading-out a small set of individual objects, is not an instantaneous process. Instead, individuation capacity increases in steps within the lifetime of visual persistence of the stimulus, suggesting that visual capacity limitations arise as a result of the narrow window of feedforward processing. We characterize this temporal window as coordinating individuation and integration of sensory information over a brief interval of around 100 ms. Neural signatures of integration windows are revealed in reset alpha oscillations shortly after stimulus onset within generators in parietal areas. Our findings suggest that shortlived alpha phase synchronization (≈1 cycle) is key for individuation and integration of visual transients on rapid time scales (&lt;100 ms). Within this time frame intermediate-level vision provides an equilibrium between the competing needs to individuate invariant objects, integrate information about those objects over time, and remain sensitive to dynamic changes in sensory input. We discuss theoretical and practical implications of temporal windows in visual processing, how they create a fundamental capacity limit, and their role in constraining the real-time dynamics of visual processing. The temporal window of individuation limits visual capacity. Front. Psychol. 
5:952.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="d6d32355bc9b4bf113d942a6acc74557" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512642,&quot;asset_id&quot;:29058808,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512642/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058808"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058808"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058808; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058808]").text(description); $(".js-view-count[data-work-id=29058808]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058808; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058808']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "d6d32355bc9b4bf113d942a6acc74557" } } $('.js-work-strip[data-work-id=29058808]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058808,"title":"The temporal window of individuation limits visual capacity","internal_url":"https://www.academia.edu/29058808/The_temporal_window_of_individuation_limits_visual_capacity","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512642,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512642/thumbnails/1.jpg","file_name":"The_temporal_window_of_individuation_lim20161010-18911-ka5146.pdf","download_url":"https://www.academia.edu/attachments/49512642/download_file","bulk_download_file_name":"The_temporal_window_of_individuation_lim.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512642/The_temporal_window_of_individuation_lim20161010-18911-ka5146-libre.pdf?1476136370=\u0026response-content-disposition=attachment%3B+filename%3DThe_temporal_window_of_individuation_lim.pdf\u0026Expires=1740881335\u0026Signature=Pr3rfkn2c9YllRsZB5pcECRmuP6sB0uU4LLuDTpvOFBTsvllKGo4PI34UL8r7iYYvNjIokqNJ3GVybOwLdOvaIVOmvhhGcAejjfDrqeq6nHgvAG4ywPfvuN~ibQR9S1eD-IdStIQackBCG56~RDsJ3auIuMsvcvIuS4m~9~tWkXV1zg-xIDHkf5t9OJzQUIbtGb3j0X8ehyR-Vi0hA373iRjLsdYx~AQ88CNg2b90eXbpc4FbSvpzVXbVoEoV-OBqi0pMjzULI9B9XIlHQUuyXqNaLTmx2x8LSNGremaBUurzcQlBUGMTt9YLjPppfRlc992beFE7FcuYS3zApI24w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="8625249" id="visualstabilityandremapping"><div class="js-work-strip profile--work_container" data-work-id="13331231"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/13331231/Spatial_specificity_of_the_remapped_BOLD_response_across_saccades"><img alt="Research paper thumbnail of Spatial specificity of the remapped BOLD response across saccades" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/13331231/Spatial_specificity_of_the_remapped_BOLD_response_across_saccades">Spatial specificity of the remapped BOLD response across saccades</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/JensSchwarzbach">Jens Schwarzbach</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a></span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="13331231"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="13331231"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 13331231; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); 
$(".js-view-count[data-work-id=13331231]").text(description); $(".js-view-count[data-work-id=13331231]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 13331231; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='13331231']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=13331231]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":13331231,"title":"Spatial specificity of the remapped BOLD response across saccades","internal_url":"https://www.academia.edu/13331231/Spatial_specificity_of_the_remapped_BOLD_response_across_saccades","owner_id":32571414,"coauthors_can_edit":true,"owner":{"id":32571414,"first_name":"Jens","middle_initials":null,"last_name":"Schwarzbach","page_name":"JensSchwarzbach","domain_name":"unitn","created_at":"2015-06-26T13:57:58.821-07:00","display_name":"Jens Schwarzbach","url":"https://unitn.academia.edu/JensSchwarzbach"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058781"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058781/Perisaccadic_perception_temporal_unmasking_or_spatial_uncrowding"><img alt="Research paper thumbnail of Perisaccadic perception: temporal unmasking or spatial uncrowding?" 
class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058781/Perisaccadic_perception_temporal_unmasking_or_spatial_uncrowding">Perisaccadic perception: temporal unmasking or spatial uncrowding?</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Stimuli briefly presented around the time of saccades are often misperceived in terms of their sp...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Stimuli briefly presented around the time of saccades are often misperceived in terms of their spatial or temporal attributes. Recently, evidence for a peri-saccadic reduction in crowding was reported, interpreted in terms of remapping of receptive fields (Harrison et al., 2013). However, that study used forward and backward masks and weak (always vertical) flankers, creating a &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot;super-crowding&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot; paradigm rather than &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot;pure&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot; spatial crowding (Vickery et al., 2009) We investigated whether these effects are better explained by spatial (pure crowding) or temporal factors (masking), as well as investigating the role of the pre-saccadic shift of attention (van Koningsbruggen &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp; Buonocore, 2013).. In two experiments, we independently varied the distance from target to flankers, the presence of forward and backward masks and whether or not participants made a saccade. Moreover, we controlled for the orientation of the flankers (all vertical or randomized orientation). First, we replicated the pattern of crowding during fixation using masked stimuli but we also found that much of the decrement of performance with the super-crowding display was explained by temporal masking rather than only spatial crowding. Moreover, the level of crowding obtained with random rather than vertical flanker orientations was stronger than the super-crowded display alone, reinforcing the hypothesis of a greater temporal than spatial effect. Second, we replicated the finding of a small peri-saccadic improvement in performance with super-crowding displays as in the original study but found a different pattern in the pure crowding version without masking. Overall, our pattern of results was in accordance with a general benefit from the well-known pre-saccadic shift in attention towards the saccade target. These results are consistent with evidence for peri-saccadic shifts in receptive fields towards the saccade target. 
Meeting abstract presented at VSS 2015.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058781"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058781"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058781; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058781]").text(description); $(".js-view-count[data-work-id=29058781]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058781; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058781']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058781]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058781,"title":"Perisaccadic perception: temporal unmasking or spatial uncrowding?","internal_url":"https://www.academia.edu/29058781/Perisaccadic_perception_temporal_unmasking_or_spatial_uncrowding","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058737"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058737/Backward_Masking_and_Unmasking_Across_Saccadic_Eye_Movements"><img alt="Research paper thumbnail of Backward Masking and Unmasking Across Saccadic Eye Movements" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" 

Backward Masking and Unmasking Across Saccadic Eye Movements
Current Biology, 2010
Humans make several eye movements every second, and thus a fundamental challenge in conscious vision is to maintain continuity by matching object representations in constantly shifting retinal coordinates. One possible mechanism for visual stability is the remapping of receptive fields around saccade onset, combining pre- and postsaccadic information. The mislocalization of stimuli briefly flashed near the time of saccades has been taken as evidence for remapping. Yet the relationship between remapping, mislocalization, and trans-saccadic integration remains unclear. We asked participants to identify a target stimulus presented around the time of saccade onset, which was immediately visually masked by a postsaccadic stimulus presented in the same spatial location (backward masking). Presenting two rapidly occurring events across separate fixations allowed us to investigate how the visual system reconstructs what happens during a saccade. We show that saccadic remapping resulted in perception of target and mask as either spatially segregated or integrated, depending on the exact timing of saccade onset. During segregation, the target was unmasked because it was perceived as displaced from the mask; during integration, the postsaccadic stimulus masked the presaccadic target (spatiotopic masking). Thus, segregation and integration may work together to yield continuity in conscious vision.
src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058739/Remapping_of_the_line_motion_illusion_across_eye_movements">Remapping of the line motion illusion across eye movements</a></div><div class="wp-workCard_item"><span>Experimental Brain Research</span><span>, 2012</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although motion processing in the brain has been classically studied in terms of retinotopically ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although motion processing in the brain has been classically studied in terms of retinotopically defined receptive fields, recent evidence suggests that motion perception can occur in a spatiotopic reference frame. We investigated the underlying mechanisms of spatiotopic motion perception by examining the role of saccade metrics as well as the capacity of trans-saccadic motion. To this end, we used the line motion illusion (LMI), in which a straight line briefly shown after a high contrast stimulus (inducer) is perceived as expanding away from the inducer position. This illusion provides an interesting test of spatiotopic motion because the neural correlates of this phenomenon have been found early in the visual cortex and the effect does not require focused attention. We measured the strength of LMI both with stable fixation and when participants were asked to perform a 10° saccade during the blank ISI between the inducer and the line. A strong motion illusion was found across saccades in spatiotopic coordinates. When the inducer was presented near in time to the saccade cue, saccadic latencies were longer, saccade amplitudes were shorter, and the strength of reported LMI was consistently reduced. We also measured the capacity of the trans-saccadic LMI by varying the number of inducers. In contrast to a visual-spatial memory task, we found that the LMI was largely eliminated by saccades when two or more inducers were displayed. 
Together, these results suggest that motion perceived in non-retinotopic coordinates depends on an active, saccade-dependent remapping process with a strictly limited capacity.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058739"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058739"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058739; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058739]").text(description); $(".js-view-count[data-work-id=29058739]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058739; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058739']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058739]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058739,"title":"Remapping of the line motion illusion across eye movements","internal_url":"https://www.academia.edu/29058739/Remapping_of_the_line_motion_illusion_across_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058744"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058744/Dynamic_object_based_remapping_of_visual_features_in_trans_saccadic_perception"><img alt="Research paper thumbnail of Dynamic, 

Dynamic, object-based remapping of visual features in trans-saccadic perception
Journal of Vision, 2008
Saccadic eye movements can dramatically change the location at which an object is projected onto the retina. One mechanism that might underlie the perception of stable objects, despite the occurrence of saccades, is the "remapping" of receptive fields around the time of saccadic eye movements. Here we examined two possible models of trans-saccadic remapping of visual features: (1) remapping in spatiotopic coordinates that remain constant across saccades, or (2) object-based remapping in retinal coordinates. We used form adaptation to test "object"- and "space"-based predictions for an adapter that changed spatial and/or retinal location as a result of eye movements, object motion, or manual displacement with a computer mouse. The predictability and speed of the object motion were also manipulated. The main finding was that maximum transfer of the form aftereffect in retinal coordinates occurred when there was a saccade and when the object motion was attended and predictable. A small transfer was also found when observers moved the object across the screen using a computer mouse. The overall pattern of results is consistent with the theory of object-based remapping for salient stimuli. Thus, the active updating of the location and features of attended objects may play a role in perceptual stability.
saccadic eye movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49512584/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058751/Continuous_perception_of_motion_and_shape_across_saccadic_eye_movements">Continuous perception of motion and shape across saccadic eye movements</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2010</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although our naïve experience of visual perception is that it is smooth and coherent, the actual ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although our naïve experience of visual perception is that it is smooth and coherent, the actual input from the retina involves brief and discrete fixations separated by saccadic eye movements. This raises the question of whether our impression of stable and continuous vision is merely an illusion. To test this, we examined whether motion perception can &quot;bridge&quot; a saccade in a two-frame apparent motion display in which the two frames were separated by a saccade. We found that transformational apparent motion, in which an object is seen to change shape and even move in three dimensions during the motion trajectory, continues across saccades. Moreover, participants preferred an interpretation of motion in spatial, rather than retinal, coordinates. The strength of the motion percept depended on the temporal delay between the two motion frames and was sufficient to give rise to a motion-from-shape aftereffect, even when the motion was defined by a secondorder shape cue (&quot;phantom transformational apparent motion&quot;). 
These findings suggest that motion and shape information are integrated across saccades into a single, coherent percept of a moving object.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="781e60fe140c02e9b06906a42ae1ac1f" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512584,&quot;asset_id&quot;:29058751,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512584/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058751"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058751"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058751; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058751]").text(description); $(".js-view-count[data-work-id=29058751]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058751; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058751']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "781e60fe140c02e9b06906a42ae1ac1f" } } $('.js-work-strip[data-work-id=29058751]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058751,"title":"Continuous perception of motion and shape across saccadic eye movements","internal_url":"https://www.academia.edu/29058751/Continuous_perception_of_motion_and_shape_across_saccadic_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512584,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512584/thumbnails/1.jpg","file_name":"Continuous_perception_of_motion_and_shap20161010-18911-1ta850z.pdf","download_url":"https://www.academia.edu/attachments/49512584/download_file","bulk_download_file_name":"Continuous_perception_of_motion_and_shap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512584/Continuous_perception_of_motion_and_shap20161010-18911-1ta850z-libre.pdf?1476136386=\u0026response-content-disposition=attachment%3B+filename%3DContinuous_perception_of_motion_and_shap.pdf\u0026Expires=1740881335\u0026Signature=Lt0v465rSmcBEB-wxtanOTayoaLCNPjXuRoQ9F4zKDjCKr0bdClXldUVDWYvEhPOzHMqt8Q4NneoMOwk1V8u~4ikhXxsDKvavq5aEb4hVTT0ZCyCrKhmYZVDcrMuFf3OayBO07rqZVcc3ki5LPL0txtwcvAyNW~pWSYTO8~uz4vbQZNw2LdJ6nCFn9nWL00L5Si1LOq2IHLdAthUQ5RwWiBFesMDT0VNCi4lAl9o2RvftacPO6f5RHWbc2cwQcBBjAaJ6viwc1KuR73oyySV1Cc9uUsCbs-d1SAa2U-n794RwYKUuiK7Elxz5SkwodVQ44~V6m3XgQnor5zxvLfK3Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058758"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058758/The_missing_link_for_attention_pointers_comment_on_Cavanagh_et_al"><img alt="Research paper thumbnail of The missing link for attention pointers: comment on Cavanagh et al" class="work-thumbnail" src="https://attachments.academia-assets.com/49512583/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058758/The_missing_link_for_attention_pointers_comment_on_Cavanagh_et_al">The missing link for attention pointers: comment on Cavanagh et al</a></div><div class="wp-workCard_item"><span>Trends in Cognitive Sciences</span><span>, 2010</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="68f08085aff1462109c6de69ec564b6e" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512583,&quot;asset_id&quot;:29058758,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512583/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058758"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058758"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058758; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + 
window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058758]").text(description); $(".js-view-count[data-work-id=29058758]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058758; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058758']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "68f08085aff1462109c6de69ec564b6e" } } $('.js-work-strip[data-work-id=29058758]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058758,"title":"The missing link for attention pointers: comment on Cavanagh et al","internal_url":"https://www.academia.edu/29058758/The_missing_link_for_attention_pointers_comment_on_Cavanagh_et_al","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512583,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512583/thumbnails/1.jpg","file_name":"j.tics.2010.08.00720161010-18905-18p3s63.pdf","download_url":"https://www.academia.edu/attachments/49512583/download_file","bulk_download_file_name":"The_missing_link_for_attention_pointers.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512583/j.tics.2010.08.00720161010-18905-18p3s63-libre.pdf?1476136380=\u0026response-content-disposition=attachment%3B+filename%3DThe_missing_link_for_attention_pointers.pdf\u0026Expires=1740881335\u0026Signature=IvKLSBHKRCQqB9zlsvASZRQKga~rRpfZw2TSkmgijhAGyEAY9CFOtewiGw8Cu3yIplU6SqtjTUtFM9bSIcEMx9-UqwG61AI2HsczK7DukiFEX~cTSPv-NZZ2Tul6OQq8SkXudfbaSzm4z~-JX-Uy98rGDFjkbg-F~Iw7l7wI2FI3YnG~FVjo2uwUJQf8SUKbIJ8tchjiRz0FL8-TApiNk2VwDjrPujee9QPD9EVBH2QlMO5AStQrSK9OHDCCfyp8vO81FZoSWLCqjipQb1VZgLR43pKPktmDq5ca01WoR32ig4m6Ly04jSkyUWgCuwzJ8N2M6CmlpNtMjQpZJ2mHkg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058761"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058761/Selective_attention_and_the_active_remapping_of_object_features_in_trans_saccadic_perception"><img 
alt="Research paper thumbnail of Selective attention and the active remapping of object features in trans-saccadic perception" class="work-thumbnail" src="https://attachments.academia-assets.com/49512570/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058761/Selective_attention_and_the_active_remapping_of_object_features_in_trans_saccadic_perception">Selective attention and the active remapping of object features in trans-saccadic perception</a></div><div class="wp-workCard_item"><span>Vision Research</span><span>, 2009</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">When the same object is attended both before and after a saccadic eye movement, its visual featur...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">When the same object is attended both before and after a saccadic eye movement, its visual features may be remapped to the new retinal position of the object. To further investigate the role of selective attention in trans-saccadic perception, the magnitude of the cross-saccadic tilt aftereffect was measured for both attended and unattended objects. The results show that both selective attention and saccadic eye movements influenced the magnitude of the tilt aftereffect, but in different ways. Dividing attention among multiple objects lead to a general decrease in the tilt aftereffect, independent of whether or not a saccade occurred. Making a saccade also resulted in a consistent reduction of the aftereffect, but this was due to incomplete transfer of form adaptation to the new retinal position. The influences of selective attention and saccadic remapping on the tilt aftereffect were independent and additive. These findings suggest that trans-saccadic perception is not limited to a single object but instead depends on the allocation of selective attention. 
Overall, the results are consistent with the hypothesis that the role of attention is to select salient objects, with trans-saccadic perception mechanisms acting to maintain information about those salient objects across eye movements.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6cd8b02955c21f0ee608df4b49b057c0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512570,&quot;asset_id&quot;:29058761,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512570/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058761"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058761"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058761; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058761]").text(description); $(".js-view-count[data-work-id=29058761]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058761; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058761']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6cd8b02955c21f0ee608df4b49b057c0" } } $('.js-work-strip[data-work-id=29058761]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058761,"title":"Selective attention and the active remapping of object features in trans-saccadic perception","internal_url":"https://www.academia.edu/29058761/Selective_attention_and_the_active_remapping_of_object_features_in_trans_saccadic_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512570,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512570/thumbnails/1.jpg","file_name":"Selective_attention_and_the_active_remap20161010-18905-12xvbw4.pdf","download_url":"https://www.academia.edu/attachments/49512570/download_file","bulk_download_file_name":"Selective_attention_and_the_active_remap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512570/Selective_attention_and_the_active_remap20161010-18905-12xvbw4-libre.pdf?1476136398=\u0026response-content-disposition=attachment%3B+filename%3DSelective_attention_and_the_active_remap.pdf\u0026Expires=1740881335\u0026Signature=FBxdB7A1lHmDi5PfsY4nafTn04BnbpwpSStzid7sY6Q1J2hjW5sa9tXJWfRvejuA4CEcHgNRlX8zOvz1vPzj31owr1n1Ckp2VA4ifAqLRqH8zivQ9fhYD-YXucCSH134HtF21tJgz1k3-GHcuSZrce3vcQE5ouA-aEVqrq8y91FFgfoerb9CUxJnsJvMCcua7L1xTIx8ySKh9jrHi0QhIB~X4-tSTZRXyKlgExbL3tAIy86N-eac0GNaoOwJJSjXTBLXJX6ZG53tNnJfMSeZhB~DyRe4Uh~7NReRdjW5IkzVogGV9BC8sPv-uE3IONwAVI7y2QXZqkrSWbdmKKpYcQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058775"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058775/Saccade_kinematics_modulate_perisaccadic_perception"><img alt="Research paper thumbnail of Saccade kinematics modulate perisaccadic perception" class="work-thumbnail" src="https://attachments.academia-assets.com/49512574/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058775/Saccade_kinematics_modulate_perisaccadic_perception">Saccade kinematics modulate perisaccadic perception</a></div><div class="wp-workCard_item"><span>Journal of vision</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Around the time of execution of an eye movement, participants systematically misperceive the spat...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Around the time of execution of an eye movement, participants systematically misperceive the spatial location of briefly flashed visual stimuli. This phenomenon, known as perisaccadic mislocalization, is thought to involve an active process that takes into account the motor plan (efference copy) of the upcoming saccade. While it has been proposed that the motor system anticipates and informs the visual system about the upcoming eye movements, at present the type and detail of information carried by this motor signal remains unclear. Some authors have argued that the efference copy conveys only coarse information about the direction of the eye movement, while a second theoretical view proposes that it provides specific details about the direction, amplitude, and velocity of the saccade to come. 
To test between these alternatives, we investigated the influence of saccade parameters on a perisaccadic unmasking task in which performance in discriminating the identity of a target (face o...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="f7af82b4e1d38649166651b86057abb9" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512574,&quot;asset_id&quot;:29058775,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512574/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058775"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058775"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058775; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058775]").text(description); $(".js-view-count[data-work-id=29058775]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058775; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058775']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "f7af82b4e1d38649166651b86057abb9" } } $('.js-work-strip[data-work-id=29058775]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058775,"title":"Saccade kinematics modulate perisaccadic perception","internal_url":"https://www.academia.edu/29058775/Saccade_kinematics_modulate_perisaccadic_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512574,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512574/thumbnails/1.jpg","file_name":"Saccade_kinematics_modulate_perisaccadic20161010-18905-8e93qz.pdf","download_url":"https://www.academia.edu/attachments/49512574/download_file","bulk_download_file_name":"Saccade_kinematics_modulate_perisaccadic.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512574/Saccade_kinematics_modulate_perisaccadic20161010-18905-8e93qz-libre.pdf?1476136392=\u0026response-content-disposition=attachment%3B+filename%3DSaccade_kinematics_modulate_perisaccadic.pdf\u0026Expires=1740881335\u0026Signature=JTGCw53PcCBGw35FYvXqmmIg7HVOiFQeTlLou3IB~mmKbMu1Jubpb4N1S1nGhlw9bISnR8xvUqBB4r-AbthR5X0f88cYEzvuYkFO-AapVh5WKgBpTVdBZVy~16fn5aRdO-uI9Ca-v3~28fHNV3-ZMIv4oMocvX5dKAfnz8PnFN7dBfApi4vMlEHL1TOxKyTJ1gR-7nksim7eMZ-0CZWy5jgAXjHoFn1NOstLDYxEASVx80lm7I9KroUltHn5bFneCQ5TSljOfRDB9lZRhGfEBerItveqFfhux~zw~mtFjohQ2rHXRoWi3hbZysaDJlGImDg9dhBcYdzXWHTaIWe89g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058777"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058777/Disrupting_saccadic_updating_visual_interference_prior_to_the_first_saccade_elicits_spatial_errors_in_the_secondary_saccade_in_a_double_step_task"><img alt="Research paper thumbnail of Disrupting saccadic updating: visual interference prior to the first saccade elicits spatial errors in the secondary saccade in a double-step task" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058777/Disrupting_saccadic_updating_visual_interference_prior_to_the_first_saccade_elicits_spatial_errors_in_the_secondary_saccade_in_a_double_step_task">Disrupting saccadic updating: visual interference prior to the first saccade elicits spatial errors in the secondary saccade in a double-step task</a></div><div class="wp-workCard_item"><span>Experimental brain research</span><span>, Jan 2, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">When we explore the visual environment around us, we produce sequences of very precise eye moveme...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">When we explore the visual environment around us, we produce sequences of very precise eye movements aligning the objects of interest with the most sensitive part of the retina for detailed visual processing. A copy of the impending motor command, the corollary discharge, is sent as soon as the first saccade in a sequence is ready to monitor the next fixation location and correctly plan the subsequent eye movement. 
Neurophysiological investigations have shown that chemical interference with the corollary discharge generates a distinct pattern of spatial errors on sequential eye movements, with similar results also from clinical and TMS studies. Here, we used saccadic inhibition to interfere with the temporal domain of the first of two subsequent saccades during a standard double-step paradigm. In two experiments, we report that the temporal interference on the primary saccade led to a specific error in the final landing position of the second saccade that was consistent with previou...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058777"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058777"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058777; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058777]").text(description); $(".js-view-count[data-work-id=29058777]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058777; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058777']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058777]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058777,"title":"Disrupting saccadic updating: visual interference prior to the first saccade elicits spatial errors in the secondary saccade in a double-step task","internal_url":"https://www.academia.edu/29058777/Disrupting_saccadic_updating_visual_interference_prior_to_the_first_saccade_elicits_spatial_errors_in_the_secondary_saccade_in_a_double_step_task","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058786"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058786/Selective_attention_and_the_active_remapping_of_object_features_in_trans_saccadic_perception"><img alt="Research paper thumbnail of Selective attention and the active remapping of object features in trans-saccadic perception" class="work-thumbnail" src="https://attachments.academia-assets.com/49512585/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058786/Selective_attention_and_the_active_remapping_of_object_features_in_trans_saccadic_perception">Selective attention and the active remapping of object features in trans-saccadic perception</a></div><div class="wp-workCard_item"><span>Vision Research</span><span>, Jun 2, 2009</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">When the same object is attended both before and after a saccadic eye movement, its visual featur...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">When the same object is attended both before and after a saccadic eye movement, its visual features may be remapped to the new retinal position of the object. To further investigate the role of selective attention in trans-saccadic perception, the magnitude of the cross-saccadic tilt aftereffect was measured for both attended and unattended objects. The results show that both selective attention and saccadic eye movements influenced the magnitude of the tilt aftereffect, but in different ways. Dividing attention among multiple objects lead to a general decrease in the tilt aftereffect, independent of whether or not a saccade occurred. Making a saccade also resulted in a consistent reduction of the aftereffect, but this was due to incomplete transfer of form adaptation to the new retinal position. The influences of selective attention and saccadic remapping on the tilt aftereffect were independent and additive. These findings suggest that trans-saccadic perception is not limited to a single object but instead depends on the allocation of selective attention. 
Overall, the results are consistent with the hypothesis that the role of attention is to select salient objects, with trans-saccadic perception mechanisms acting to maintain information about those salient objects across eye movements.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="1099c79d4a86e24e64a3b05532b90146" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512585,&quot;asset_id&quot;:29058786,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512585/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058786"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058786"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058786; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058786]").text(description); $(".js-view-count[data-work-id=29058786]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058786; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058786']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "1099c79d4a86e24e64a3b05532b90146" } } $('.js-work-strip[data-work-id=29058786]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058786,"title":"Selective attention and the active remapping of object features in trans-saccadic perception","internal_url":"https://www.academia.edu/29058786/Selective_attention_and_the_active_remapping_of_object_features_in_trans_saccadic_perception","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512585,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512585/thumbnails/1.jpg","file_name":"Selective_attention_and_the_active_remap20161010-18905-11jmxq9.pdf","download_url":"https://www.academia.edu/attachments/49512585/download_file","bulk_download_file_name":"Selective_attention_and_the_active_remap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512585/Selective_attention_and_the_active_remap20161010-18905-11jmxq9-libre.pdf?1476136379=\u0026response-content-disposition=attachment%3B+filename%3DSelective_attention_and_the_active_remap.pdf\u0026Expires=1740881335\u0026Signature=FuDed6H~e5HfOjAXpNCP05h6V6K4hpohdeoKJyq401KGmcCxLtrZMD5aBA4dK0xzEngW3Ti8fZpWr5gZa9mdf0vmy2m1KpAkrRS2fj6hhq7w7u8ZkgIOSSJzvL0I~kHOVDYPJjWfTXpdf6YHrBrRoVaj7nCveBRdJ1aky~Huvxcf-dZc6VFar0OuNqlKvjekaGoOOqSVrvyncJ-iMQ8e5-U9mULY5Q6nq0~TdbJlMCv2KopaqXG0FA~3DzLB6dr2aUu9KqwALcX3kPMGPm-PyRLfzQOApGXe~MIVUBi-2qpaAM9zXR-eu49QzEGk1StoFXS2jkN-ATOLIa0I07zS1Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058791"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058791/Predictive_transfer_of_visual_adaptation_before_saccadic_eye_movements"><img alt="Research paper thumbnail of Predictive transfer of visual adaptation before saccadic eye movements" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058791/Predictive_transfer_of_visual_adaptation_before_saccadic_eye_movements">Predictive transfer of visual adaptation before saccadic eye movements</a></div><div class="wp-workCard_item"><span>J Vision</span><span>, 2010</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058791"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058791"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058791; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058791]").text(description); $(".js-view-count[data-work-id=29058791]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058791; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var 
container = $(".js-work-strip[data-work-id='29058791']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058791]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058791,"title":"Predictive transfer of visual adaptation before saccadic eye movements","internal_url":"https://www.academia.edu/29058791/Predictive_transfer_of_visual_adaptation_before_saccadic_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058809"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058809/Beyond_the_point_of_no_return_effects_of_visual_distractors_on_saccade_amplitude_and_velocity"><img alt="Research paper thumbnail of Beyond the point of no return: effects of visual distractors on saccade amplitude and velocity" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058809/Beyond_the_point_of_no_return_effects_of_visual_distractors_on_saccade_amplitude_and_velocity">Beyond the point of no return: effects of visual distractors on saccade amplitude and velocity</a></div><div class="wp-workCard_item"><span>Journal of Neurophysiology</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Visual transients, such as a bright flash, reduce the proportion of saccades executed around 60-1...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Visual transients, such as a bright flash, reduce the proportion of saccades executed around 60-125 ms after flash onset, a phenomenon known as saccadic inhibition. 
Across three experiments, we apply a similar time-course analysis to the amplitudes and velocities of saccades. Alongside the expected reduction of saccade frequency in the key time period, we report two perturbations of the &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot;main sequence&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot;, one before and one after the period of saccadic inhibition. First, saccades launched between 30 to 70 ms following the flash were hypometric, with peak speed exceeding that expected for a saccade of similar amplitude. This finding was in contrast to the common idea that saccades have passed a &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot;point-of-no-return&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;quot; around 60 ms prior to launching, escaping interference from distractors. The early hypometric saccades observed were not a consequence of spatial averaging between target and distractor locations, as they were found not only following a localized central flash (Experiment 1), but also following a spatially generalized flash (Experiment 2). Second, across experiments, saccades launched at 110 ms post-flash, toward the end of saccadic inhibition, had normal amplitude but a peak speed higher than expected for that amplitude suggesting increased collicular excitation at the time of launching. Overall, the results show that saccades that escape inhibition following a visual transient are not necessarily unaffected, but instead can reveal interference in spatial and kinematic measures.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058809"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058809"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058809; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058809]").text(description); $(".js-view-count[data-work-id=29058809]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058809; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058809']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var 
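The time-course analysis named in this abstract is essentially a matter of binning saccades by their onset latency relative to the distractor flash and tracking, per bin, how many saccades occur and what their kinematics look like. As a rough sketch of that idea (not the authors' actual analysis code; the function and argument names are invented, and the inputs are assumed to be NumPy arrays of per-saccade measurements), in Python:

    import numpy as np

    def saccade_time_course(t_onset_ms, amplitude_deg, peak_vel_deg_s,
                            bin_ms=10, t_max_ms=200):
        """Bin saccades by onset time relative to the flash; per bin, return
        the saccade count and the mean amplitude and mean peak velocity."""
        edges = np.arange(0, t_max_ms + bin_ms, bin_ms)
        which = np.digitize(t_onset_ms, edges) - 1      # bin index for each saccade
        rows = []
        for b in range(len(edges) - 1):
            sel = which == b
            rows.append({
                "t_ms": int(edges[b]),
                "n_saccades": int(sel.sum()),           # dip ~60-125 ms = inhibition
                "mean_amp": float(amplitude_deg[sel].mean()) if sel.any() else float("nan"),
                "mean_pvel": float(peak_vel_deg_s[sel].mean()) if sel.any() else float("nan"),
            })
        return rows

A dip in the per-bin counts around 60-125 ms is the saccadic-inhibition signature; the per-bin amplitude and peak-velocity means are where the two reported main-sequence perturbations (hypometric saccades at 30-70 ms, elevated peak speed near 110 ms) would show up.
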
Individuation, estimation and numerical cognition

A Shared, Flexible Neural Map Architecture Reflects Capacity Limits in Both Visual Short-Term Memory and Enumeration
by Rakesh Sengupta, David Melcher, and Manuela Piazza
Human cognition is characterized by severe capacity limits: we can accurately track, enumerate, or hold in mind only a small number of items at a time. It remains debated whether capacity limitations across tasks are determined by a common system. Here we measure brain activation of adult subjects performing either a visual short-term memory (vSTM) task, consisting of holding in mind precise information about the orientation and position of a variable number of items, or an enumeration task, consisting of assessing the number of items in those sets. We show that task-specific capacity limits (three to four items in enumeration and two to three in vSTM) are neurally reflected in the activity of the posterior parietal cortex (PPC): an identical set of voxels in this region, commonly activated during the two tasks, changed its overall response profile to reflect the task-specific capacity limitations. These results, replicated in a second experiment, were further supported by multivariate pattern analysis, in which we could decode the number of items presented over a larger range during enumeration than during vSTM. Finally, we simulated our results with a computational model of PPC using a saliency-map architecture in which the level of mutual inhibition between nodes gives rise to capacity limitations and reflects the task-dependent precision with which objects need to be encoded (high precision for vSTM, lower precision for enumeration). Together, our work supports the existence of a common, flexible system underlying capacity limits across tasks in PPC that may take the form of a saliency map.

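The mutual-inhibition mechanism described in that last step can be illustrated compactly. Below is a minimal Python sketch, not the authors' published implementation: a small pool of rate nodes with self-excitation and uniform all-to-all inhibition, where each to-be-encoded item briefly drives one node and "capacity" is read out as the number of nodes that can sustain their own activity once the input is removed. All names and parameter values are illustrative:

    import numpy as np

    def surviving_items(n_items, w_inh, w_self=1.3, n_nodes=16,
                        dt=0.01, t_stim=5.0, t_delay=30.0, theta=0.5):
        """Drive n_items nodes of a mutually inhibiting rate network with a
        transient input, then count how many stay active through the delay."""
        x = np.zeros(n_nodes)                      # node state variables
        stim = np.zeros(n_nodes)
        stim[:n_items] = 1.0                       # one external input per item
        for step in range(int((t_stim + t_delay) / dt)):
            r = np.tanh(np.clip(x, 0.0, None))     # rectified firing rates
            inp = stim if step * dt < t_stim else 0.0
            drive = w_self * r - w_inh * (r.sum() - r) + inp
            x += dt * (drive - x)                  # Euler step, time constant = 1
        r = np.tanh(np.clip(x, 0.0, None))
        return int((r > theta).sum())

    # Stronger mutual inhibition lowers the number of items the map can hold:
    for w_inh in (0.06, 0.12, 0.25):
        cap = max((k for k in range(1, 9) if surviving_items(k, w_inh) == k),
                  default=0)
        print(f"inhibition {w_inh:.2f} -> capacity {cap} item(s)")

With these toy parameters, raising the inhibition weight from 0.06 to 0.25 drops the sustainable set size from four items to one, which is the qualitative behavior the abstract attributes to task-dependent inhibition levels: stronger effective inhibition, smaller capacity.
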
A shared sensorimotor map for visual memory, counting and trans-saccadic perception
Journal of Vision, 2010

Summary statistics support spatiotemporal stability
Journal of Vision, 2013

u-mr2x" data-work-id="29058747"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058747; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058747]").text(description); $(".js-view-count[data-work-id=29058747]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058747; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058747']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "1be917706d94f983bdf4c49e8a40c991" } } $('.js-work-strip[data-work-id=29058747]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058747,"title":"Summary statistics support spatiotemporal stability","internal_url":"https://www.academia.edu/29058747/Summary_statistics_support_spatiotemporal_stability","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512561,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512561/thumbnails/1.jpg","file_name":"253937811.pdf","download_url":"https://www.academia.edu/attachments/49512561/download_file","bulk_download_file_name":"Summary_statistics_support_spatiotempora.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512561/253937811-libre.pdf?1476136458=\u0026response-content-disposition=attachment%3B+filename%3DSummary_statistics_support_spatiotempora.pdf\u0026Expires=1740881335\u0026Signature=NpzmISP5TtHu6kbwCOtpvhZeEGESyk3JiuKuNse-CnnYx3vHU0ZTvIFuh1-P2gHuk2HnyhJAPl43VtBtS7gdaP0zwYpWZ27Vazzf7~F0jUsdGeM5o7gNaxSQx9objzrlEoOW0lKsx8PmJ-SKbiUD7RpstP4VWmo0uQLY2w0ecBePnpSehaDoez-ikoLQdGJ7KVro1AX0xjoy8VcDLOeYk2voAGNQwlbwSGXIVxT5iCC-O8ZpG1PQCnqxhuZZsBFqgbhV6L0jQULS8FPG0m9YX21ayG5evbsPLrezUrMjts2NMTOjejZdOba5fQWvdva8FwncW27Snd5WSgHD7iha7g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058749"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" 
data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058749/Subitizing_and_estimation_emerge_from_a_computational_saliency_map_model_of_object_individuation"><img alt="Research paper thumbnail of Subitizing and estimation emerge from a computational saliency map model of object individuation" class="work-thumbnail" src="https://attachments.academia-assets.com/49512569/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058749/Subitizing_and_estimation_emerge_from_a_computational_saliency_map_model_of_object_individuation">Subitizing and estimation emerge from a computational saliency map model of object individuation</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://srecwarangal.academia.edu/RakeshSengupta">Rakesh Sengupta</a></span></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2013</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="df6cd01fa7f3fc3a999d557ac24a7626" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512569,&quot;asset_id&quot;:29058749,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512569/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058749"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058749"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058749; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058749]").text(description); $(".js-view-count[data-work-id=29058749]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058749; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058749']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "df6cd01fa7f3fc3a999d557ac24a7626" } } $('.js-work-strip[data-work-id=29058749]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058749,"title":"Subitizing and estimation emerge from a computational saliency map model of object individuation","internal_url":"https://www.academia.edu/29058749/Subitizing_and_estimation_emerge_from_a_computational_saliency_map_model_of_object_individuation","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512569,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512569/thumbnails/1.jpg","file_name":"Subitizing_and_estimation_emerge_from_a_20161010-27090-1dfwvc6.pdf","download_url":"https://www.academia.edu/attachments/49512569/download_file","bulk_download_file_name":"Subitizing_and_estimation_emerge_from_a.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512569/Subitizing_and_estimation_emerge_from_a_20161010-27090-1dfwvc6-libre.pdf?1476136404=\u0026response-content-disposition=attachment%3B+filename%3DSubitizing_and_estimation_emerge_from_a.pdf\u0026Expires=1740881335\u0026Signature=Qh6ap3M7hYP9aJa68O7cnFFfqnfekqhP-RnabBKEuETiF8OjLVvtgBYA~i-lN8Cxs72p3c6y2UY5toPYf9EzdgpYOa~ziCUmfHLFkKLWnlINns7uw7mHABEaWoacyYBeK9IYnvFB4vtOvsmfNhJ7LFiZgVhOztAwIIIgz07ZR4sc5LD1a8RSXwWIBhWt5MCG3IeQCiRU6c8DlQD4wp728humTmh42RsOL1PQuMR84ORvnOvxhZqrHfjoYZYhK6gh3sYXfj1qcyValPyIq6cOBgeAGABSmEdBvPZ1l8KAxcWm0a9tXoVO76bF0EujcC1NZn3GGHOLSv-pJJ7Y54lddg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058769"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058769/Global_statistical_regularities_modulate_the_speed_of_visual_search_in_patients_with_focal_attentional_deficits"><img alt="Research paper thumbnail of Global statistical regularities modulate the speed of visual search in patients with focal attentional deficits" class="work-thumbnail" src="https://attachments.academia-assets.com/49512566/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058769/Global_statistical_regularities_modulate_the_speed_of_visual_search_in_patients_with_focal_attentional_deficits">Global statistical regularities modulate the speed of visual search in patients with focal attentional 
Global statistical regularities modulate the speed of visual search in patients with focal attentional deficits
by David Melcher and Gabriele Miceli
Frontiers in Psychology, 2014
There is growing evidence that the statistical properties of ensembles of similar objects are processed in a qualitatively different manner than the characteristics of individual items. It has recently been proposed that these types of perceptual statistical representations are part of a strategy to complement focused attention in order to circumvent the visual system's limited capacity to represent more than a few individual objects in detail. Previous studies have demonstrated that patients with attentional deficits are nonetheless sensitive to these sorts of statistical representations. Here, we examined how such global representations may function to aid patients in overcoming focal attentional limitations by manipulating the statistical regularity of a visual scene while patients performed a search task. Three patients previously diagnosed with visual neglect searched for a target Gabor tilted to the left or right of vertical in displays of horizontal distractor Gabors. Although the local sizes of the distractors changed on every trial, the mean size remained stable for several trials. Patients made faster correct responses to targets in neglected regions of the visual field when global statistics remained constant over several trials, similar to age-matched controls. Given neglect patients' attentional deficits, these results suggest that stable perceptual representations of global statistics can establish a context to speed search without the need to represent individual elements in detail.

deficits","internal_url":"https://www.academia.edu/29058769/Global_statistical_regularities_modulate_the_speed_of_visual_search_in_patients_with_focal_attentional_deficits","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512566,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512566/thumbnails/1.jpg","file_name":"4053765.pdf","download_url":"https://www.academia.edu/attachments/49512566/download_file","bulk_download_file_name":"Global_statistical_regularities_modulate.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512566/4053765-libre.pdf?1476136407=\u0026response-content-disposition=attachment%3B+filename%3DGlobal_statistical_regularities_modulate.pdf\u0026Expires=1740881335\u0026Signature=Hzn3pvjIxMMcBi1y28yheZbekjRUR5CjEDRk90hQUlIMO4jxxStONUHeokEnIHFnaQozWmgc93AypmDWCYD0rTzJn0sEXpTgepjYvZ-gTMEUUgg~2TheeRTXYLjZ0DiIGqFuckF1jraYDxAlxJlG3RuEwKEjVhCQn6FTL1XmBNSk4YJ6JXXVoIxddxsowTPD3sSi84BbH3Q8D5CRQ8mv4r6tX0RA4SF-e-3BRPpQt3fkxDg9wpRcskNK9NPGyhqXhpcKm2CHGWL~wnXXup85nUk~efw-XRIeRloFMlNxdelufGGNeN27Tp9g5C7llBNbVyI~UO6WyWLx8kMMz8Se0w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058771"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058771/The_role_of_attentional_priority_and_saliency_in_determining_capacity_limits_in_enumeration_and_visual_working_memory"><img alt="Research paper thumbnail of The role of attentional priority and saliency in determining capacity limits in enumeration and visual working memory" class="work-thumbnail" src="https://attachments.academia-assets.com/49512573/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058771/The_role_of_attentional_priority_and_saliency_in_determining_capacity_limits_in_enumeration_and_visual_working_memory">The role of attentional priority and saliency in determining capacity limits in enumeration and visual working memory</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/ManuelaPiazza1">Manuela Piazza</a></span></div><div class="wp-workCard_item"><span>PloS one</span><span>, 2011</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Many common tasks require us to individuate in parallel two or more objects out of a complex scen...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span 
class="js-work-more-abstract-untruncated hidden">Many common tasks require us to individuate in parallel two or more objects out of a complex scene. Although the mechanisms underlying our abilities to count the number of items, remember the visual properties of objects and to make saccadic eye movements towards targets have been studied separately, each of these tasks require selection of individual objects and shows a capacity limit. Here we show that a common factor--salience--determines the capacity limit in the various tasks. We manipulated bottom-up salience (visual contrast) and top-down salience (task relevance) in enumeration and visual memory tasks. As one item became increasingly salient, the subitizing range was reduced and memory performance for all other less-salient items was decreased. Overall, the pattern of results suggests that our abilities to enumerate and remember small groups of stimuli are grounded in an attentional priority or salience map which represents the location of important items.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="4112143bfae0c157c5b6e787dabedc4a" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512573,&quot;asset_id&quot;:29058771,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512573/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058771"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058771"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058771; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058771]").text(description); $(".js-view-count[data-work-id=29058771]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058771; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058771']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); 
Characterizing ensemble statistics: mean size is represented across multiple frames of reference
Attention, Perception & Psychophysics, 2014
The visual system represents the overall statistical, not individual, properties of sets. Here we tested the spatial nature of ensemble statistics. We used a mean-size adaptation paradigm (Corbett et al., Visual Cognition, 20, 211-231, 2012) to examine whether average size is encoded in multiple reference frames. We adapted observers to patches of small- and large-sized dots in opposite regions of the display (left/right or top/bottom) and then tested their perceptions of the sizes of single test dots presented in regions that corresponded to retinotopic, spatiotopic, and hemispheric coordinates within the adapting displays. We observed retinotopic, spatiotopic, and hemispheric adaptation aftereffects, such that participants perceived a test dot as being larger when it was presented in the area adapted to the patch of small dots than when it was presented in the area adapted to large dots. This aftereffect also transferred between eyes. Our results demonstrate that mean size is re...


Stable statistical representations facilitate visual search
Journal of Experimental Psychology: Human Perception and Performance, 2014

Observers represent the average properties of object ensembles even when they cannot identify individual elements. To investigate the functional role of ensemble statistics, we examined how modulating statistical stability affects visual search. We varied the mean and/or individual sizes of an array of Gabor patches while observers searched for a tilted target. In "stable" blocks, the mean and/or local sizes of the Gabors were constant over successive displays, whereas in "unstable" baseline blocks they changed from trial to trial. Although there was no relationship between the context and the spatial location of the target, observers found targets faster (as indexed by faster correct responses and fewer saccades) as the global mean size became stable over several displays. Building statistical stability also facilitated scanning of the scene, as measured by larger saccadic amplitudes, faster saccadic reaction times, and shorter fixation durations. These findings su...
src="https://attachments.academia-assets.com/36367190/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/7900441/A_visual_sense_of_number_emerges_from_the_dynamics_of_a_recurrent_on_center_off_surround_neural_network">A visual sense of number emerges from the dynamics of a recurrent on-center off-surround neural network</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/SurampudiBapi">Raju S Bapi</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://srecwarangal.academia.edu/RakeshSengupta">Rakesh Sengupta</a></span></div><div class="wp-workCard_item"><span>Brain Research</span><span>, Aug 6, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">It has been proposed that the ability of humans to quickly perceive numerosity involves a visual ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">It has been proposed that the ability of humans to quickly perceive numerosity involves a visual sense of number. Different paradigms of enumeration and numerosity comparison have produced a gamut of behavioral and neuroimaging data, but there has been no unified conceptual framework that can explain results across the entire range of numerosity. The current work tries to address the ongoing debate concerning whether the same mechanism operates for enumeration of small and large numbers, through a computational approach. We describe the workings of a single-layered, fully connected network characterized by self-excitation and recurrent inhibition that operates at both subitizing and estimation ranges. We show that such a network can account for classic numerical cognition effects (the distance effect, Fechner׳s law, Weber fraction for numerosity comparison) through the network steady state activation response across different recurrent inhibition values. The model also accounts for fMRI data previously reported for different enumeration related tasks. The model also allows us to generate an estimate of the pattern of reaction times in enumeration tasks. 
Overall, these findings suggest that a single network architecture can account for both small and large number processing.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="db912888ed018ad770cf4e7fcf418309" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:36367190,&quot;asset_id&quot;:7900441,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/36367190/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="7900441"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="7900441"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 7900441; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=7900441]").text(description); $(".js-view-count[data-work-id=7900441]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 7900441; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='7900441']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "db912888ed018ad770cf4e7fcf418309" } } $('.js-work-strip[data-work-id=7900441]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":7900441,"title":"A visual sense of number emerges from the dynamics of a recurrent on-center off-surround neural network","internal_url":"https://www.academia.edu/7900441/A_visual_sense_of_number_emerges_from_the_dynamics_of_a_recurrent_on_center_off_surround_neural_network","owner_id":126325,"coauthors_can_edit":true,"owner":{"id":126325,"first_name":"Rakesh","middle_initials":null,"last_name":"Sengupta","page_name":"RakeshSengupta","domain_name":"srecwarangal","created_at":"2010-02-01T23:54:46.129-08:00","display_name":"Rakesh 
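
To make the architecture concrete, below is a minimal rate-model sketch of such a single-layer, fully connected network: each unit excites itself (on-center) and uniformly inhibits every other unit (off-surround), and the script prints the summed steady-state activation for different set sizes. The parameter values, the sigmoid nonlinearity, and the network size are illustrative assumptions, not the published model's fitted equations.

```python
import numpy as np

def simulate(n_items, n_units=50, w_self=1.2, w_inh=0.15, dt=0.05, steps=4000):
    """Single-layer, fully connected rate network: each unit excites
    itself (w_self on the diagonal) and inhibits every other unit
    (-w_inh off the diagonal). Parameters are illustrative only."""
    W = -w_inh * np.ones((n_units, n_units))   # uniform recurrent inhibition
    np.fill_diagonal(W, w_self)                # self-excitation
    I = np.zeros(n_units)
    I[:n_items] = 1.0                          # one driven unit per displayed item
    x = np.zeros(n_units)
    for _ in range(steps):
        # leaky integration towards a sigmoid of the net recurrent input
        x += dt * (-x + 1.0 / (1.0 + np.exp(-(W @ x + I))))
    return x

if __name__ == "__main__":
    # Steady-state activation as a function of set size; varying w_inh
    # would likewise shift the response profile.
    for n in (1, 2, 4, 8, 16, 32):
        print(f"{n:2d} items -> summed steady-state activation {simulate(n).sum():6.2f}")
```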
Sengupta","url":"https://srecwarangal.academia.edu/RakeshSengupta"},"attachments":[{"id":36367190,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/36367190/thumbnails/1.jpg","file_name":"sengupta_numerosity.pdf","download_url":"https://www.academia.edu/attachments/36367190/download_file","bulk_download_file_name":"A_visual_sense_of_number_emerges_from_th.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/36367190/sengupta_numerosity-libre.pdf?1421966758=\u0026response-content-disposition=attachment%3B+filename%3DA_visual_sense_of_number_emerges_from_th.pdf\u0026Expires=1740881336\u0026Signature=YTEGhegB~xSeXk1iJtc572EY9kDreLBBTdV7u-7BiYwTT9-oUIx8GSKsBJwEXsNKEkzxofUZT1r3crks-mdc6oNsnggsUuq4FHO7I9Pj5f5rnx3JjCDWmKJa8cbsC33iOXqVBGsA-R7xzGKy-ubtE4dny8CVb5ROgdWCK5wlL-ShTOqHel4zQUwxJTNst-Un8neE4GCfPgW3LdSyKQQmMNTTg5EGBxBMP9NjMfGMku2iqLj9pFfZmzqGUv~W~NnrR89SKSHULlxobmuYpCSHci3bjXH3Gx8O0iUecn12BcqPxYK55rjtzpewP0yjz4ht7TBi5B-rIZ9pwHe9ttDREQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058779"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058779/Whats_the_purpose_of_perceptual_averaging"><img alt="Research paper thumbnail of What&#39;s the purpose of perceptual averaging?" class="work-thumbnail" src="https://attachments.academia-assets.com/49512582/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058779/Whats_the_purpose_of_perceptual_averaging">What&#39;s the purpose of perceptual averaging?</a></div><div class="wp-workCard_item"><span>Journal of Vision</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Background / Purpose: Observers represent the average properties of sets even when they cannot id...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Background / Purpose: Observers represent the average properties of sets even when they cannot identify individual elements. Although it has been proposed that these representations complement limited capacity focused attention, there has yet been no empirical investigation of their functional role in visual perception. Indeed, the visual system could capitalize on statistical regularities inherent in the surrounding environment to create the illusion of stable and complete perception amidst constantly changing retinal imagery, despite its limited capacity to represent more than a handful of objects in detail. Main conclusion: We manipulated the statistical regularity of a visual scene over time while observers performed a search task. Specifically, we modulated the mean size of an array of Gabor patches while observers searched for a left or right tilted target among horizontal distractors. 
In ‘stable’ blocks, the mean size of the Gabor patches was constant over successive displays...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="22ac19f80bffff4940ece2201463c25c" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512582,&quot;asset_id&quot;:29058779,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512582/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058779"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058779"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058779; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058779]").text(description); $(".js-view-count[data-work-id=29058779]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058779; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058779']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "22ac19f80bffff4940ece2201463c25c" } } $('.js-work-strip[data-work-id=29058779]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058779,"title":"What's the purpose of perceptual averaging?","internal_url":"https://www.academia.edu/29058779/Whats_the_purpose_of_perceptual_averaging","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512582,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512582/thumbnails/1.jpg","file_name":"Whats_the_purpose_of_perceptual_averagi20161010-27087-b5r1t8.pdf","download_url":"https://www.academia.edu/attachments/49512582/download_file","bulk_download_file_name":"Whats_the_purpose_of_perceptual_averagin.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512582/Whats_the_purpose_of_perceptual_averagi20161010-27087-b5r1t8-libre.pdf?1476136431=\u0026response-content-disposition=attachment%3B+filename%3DWhats_the_purpose_of_perceptual_averagin.pdf\u0026Expires=1740881336\u0026Signature=A6SGwr~qmp0xiWbrkEmt8ITTv2zmN-PPUyAaV5xGkxcuW~bus0~H-2dWAz0tGuBS-E3RknRZY6D7fV801vZJ4FBINitMdalUQkThuHn~zmtLgEOcZ2Ss6W7~Tsa1yCpMSTDv368t0m4s0823CT2flx0iW-ox-0HZHOlEwOqOb2fRQG2hzIy4G0JmzchsBJYTW~S7a6sGdHqh1tgsMX7dIC1ESnmnY-wVGGXd8wsiXnkwANQCeeD2lcNWNWTBtgEedVQtQ1k9~wX0IYSzgsApTMxsXFFRdg-eHWqB17pSdaKlLusY1rG7yJ2E8pzQm3QnpYRUKJjuiNJhJ9StMydG8Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058782"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058782/Evidence_for_global_perceptual_averaging_in_individuals_with_Autism_Spectrum_Disorder"><img alt="Research paper thumbnail of Evidence for global perceptual averaging in individuals with Autism Spectrum Disorder" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058782/Evidence_for_global_perceptual_averaging_in_individuals_with_Autism_Spectrum_Disorder">Evidence for global perceptual averaging in individuals with Autism Spectrum Disorder</a></div><div class="wp-workCard_item"><span>Journal of vision</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Studies of visual perception in individuals with Autism Spectrum Disorder (ASD) report enhanced l...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Studies of visual perception in individuals with Autism Spectrum Disorder (ASD) report enhanced local processing, and either impaired or suppressed global processing. In contrast, we report evidence of global size averaging despite poor accuracy at recalling sizes of individual objects, as well as a persistent contextual influence of adaptation to mean size on the perceived size of single objects across groups of ASD and control observers. In Experiment 1, participants viewed a set of heterogeneously sized circles followed by two test circles and judged which test circle represented the mean size (mean task), or was a member (member task) of the set. 
Despite their noted hypersensitivity to local detail, the ASD group showed the same patterns of high accuracy in the mean task and chance accuracy in the member task as the control group, in-line with Ariely&amp;#39;s (2001) proposal that observers can extract average properties of sets without retaining information about individual items. ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058782"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058782"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058782; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058782]").text(description); $(".js-view-count[data-work-id=29058782]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058782; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058782']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058782]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058782,"title":"Evidence for global perceptual averaging in individuals with Autism Spectrum Disorder","internal_url":"https://www.academia.edu/29058782/Evidence_for_global_perceptual_averaging_in_individuals_with_Autism_Spectrum_Disorder","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="8625253" id="memoryscenesevents"><div class="js-work-strip 
profile--work_container" data-work-id="5309665"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309665/Visual_scene_memory_and_the_guidance_of_saccadic_eye_movements"><img alt="Research paper thumbnail of Visual scene memory and the guidance of saccadic eye movements" class="work-thumbnail" src="https://attachments.academia-assets.com/49352404/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309665/Visual_scene_memory_and_the_guidance_of_saccadic_eye_movements">Visual scene memory and the guidance of saccadic eye movements</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">An unresolved question is how much information can be remembered from visual scenes when they are...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">An unresolved question is how much information can be remembered from visual scenes when they are inspected by saccadic eye movements. Subjects used saccadic eye movements to scan a computer-generated scene, and afterwards, recalled as many objects as they could. Scene memory was quite good: it improved with display duration, it persisted over time long after the display was removed, and it continued to accumulate with additional viewings of the same display (Melcher, D. The persistance of memory for scenes. Nature 412, 401). The occurrence of saccadic eye movements was important to ensure good recall performance, even though subjects often recalled non-fixated objects. Inter-saccadic intervals increased with display duration, showing an influence of duration on global scanning strategy. The choice of saccadic target was predicted by a Random Selection with Distance Weighting (RSDW) model, in which the target for each saccade is selected at random from all available objects, weighted according to distance from fixation, regardless of which objects had previously been fixated. The results show that the visual memory that was reflected in the recall reports was not utilized for the immediate decision about where to look in the scene. 
Visual memory can be excellent, but it is not always reflected in oculomotor measures, perhaps because the cost of rapid on-line memory retrieval is too great.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="4fc99abdb0127cd72b2a34816682aab8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352404,&quot;asset_id&quot;:5309665,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352404/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309665"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309665"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309665; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309665]").text(description); $(".js-view-count[data-work-id=5309665]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309665; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309665']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "4fc99abdb0127cd72b2a34816682aab8" } } $('.js-work-strip[data-work-id=5309665]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":5309665,"title":"Visual scene memory and the guidance of saccadic eye movements","internal_url":"https://www.academia.edu/5309665/Visual_scene_memory_and_the_guidance_of_saccadic_eye_movements","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
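
As a concrete reading of the selection rule just described, here is a small sketch of an RSDW-style sampler. The exponential distance weighting and its scale parameter are assumptions made for illustration; the paper's actual fitted weighting function is not reproduced here.

```python
import numpy as np

def rsdw_next_target(objects, fixation, tau=5.0, rng=None):
    """Random Selection with Distance Weighting (sketch): draw the next
    saccade target at random from ALL objects, with probability falling
    off with distance from the current fixation. Previously fixated
    objects are deliberately not excluded, as the model specifies.
    The exponential fall-off and scale `tau` are illustrative choices."""
    if rng is None:
        rng = np.random.default_rng()
    objects = np.asarray(objects, dtype=float)           # (n, 2) positions
    dists = np.linalg.norm(objects - np.asarray(fixation, dtype=float), axis=1)
    weights = np.exp(-dists / tau)                       # nearer -> more likely
    return rng.choice(len(objects), p=weights / weights.sum())

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    objs = rng.uniform(-10, 10, size=(12, 2))            # 12 objects in the scene
    fix = np.zeros(2)                                    # start at screen centre
    scanpath = []
    for _ in range(8):                                   # simulate 8 saccades
        i = rsdw_next_target(objs, fix, rng=rng)
        scanpath.append(int(i))
        fix = objs[i]                                    # refixate the chosen object
    print("simulated scanpath (object indices):", scanpath)
```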
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49352404,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49352404/thumbnails/1.jpg","file_name":"Visual_scene_memory_and_the_guidance_of_20161004-25096-wi45x2.pdf","download_url":"https://www.academia.edu/attachments/49352404/download_file","bulk_download_file_name":"Visual_scene_memory_and_the_guidance_of.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49352404/Visual_scene_memory_and_the_guidance_of_20161004-25096-wi45x2-libre.pdf?1475607810=\u0026response-content-disposition=attachment%3B+filename%3DVisual_scene_memory_and_the_guidance_of.pdf\u0026Expires=1740881336\u0026Signature=NKQ3viUx9enLMiMJSxuDMbVbyrH2K1-pLjqoF-1IoU63Gp2c~3nrtxsUH-h2G-k~rUYWsoIt33zz2g4qNFLKDuV5rmmbIcTzzqzLBvXH7ShGvZy1QY61jn04xF5gchgXyzyOvOJOIXpYm39d2Fwa1zLEcssop6inHvKZgg7Jc8VavYnR0FjwlF3smffI~8OI5rN4KkpCqIrbSneBu5MvwyrLWSdboIvKxxI6ynfqc2lxNYDn3Zxmy5ABsTwvg~95lqQlQITYza3NNMuwItj03rgAuNn0-ypR5SOj8FB5SnvmgLm~GigutfPFh7QNwJu5vLGxEahjQIl3MhHp28NiJQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058733"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058733/Trans_saccadic_memory_Building_a_stable_world_from_glance_to_glance"><img alt="Research paper thumbnail of Trans-saccadic memory: Building a stable world from glance to glance" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058733/Trans_saccadic_memory_Building_a_stable_world_from_glance_to_glance">Trans-saccadic memory: Building a stable world from glance to glance</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Abstract During natural viewing, the eye samples the visual environment using a series of jerking...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Abstract During natural viewing, the eye samples the visual environment using a series of jerking, saccadic eye movements, separated by periods of fixation. This raises the fundamental question of how information from separate fixations is integrated into a single, coherent percept. We discuss two mechanisms,that may be involved in generating our stable and continuous perception of the world. 
First, information</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058733"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058733"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058733; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058733]").text(description); $(".js-view-count[data-work-id=29058733]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058733; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058733']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058733]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058733,"title":"Trans-saccadic memory: Building a stable world from glance to glance","internal_url":"https://www.academia.edu/29058733/Trans_saccadic_memory_Building_a_stable_world_from_glance_to_glance","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058735"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058735/A_computational_study_of_visual_working_memory_capacity_in_the_presence_of_saliency_effects"><img alt="Research paper thumbnail of A computational study of visual working memory capacity in the presence of saliency effects" class="work-thumbnail" 
src="https://attachments.academia-assets.com/49512557/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058735/A_computational_study_of_visual_working_memory_capacity_in_the_presence_of_saliency_effects">A computational study of visual working memory capacity in the presence of saliency effects</a></div><div class="wp-workCard_item"><span>BMC Neuroscience</span><span>, 2011</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a23688c4f2605645224a14475f8eeb67" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512557,&quot;asset_id&quot;:29058735,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512557/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058735"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058735"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058735; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058735]").text(description); $(".js-view-count[data-work-id=29058735]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058735; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058735']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "a23688c4f2605645224a14475f8eeb67" } } $('.js-work-strip[data-work-id=29058735]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058735,"title":"A computational study of visual working memory capacity in the presence of saliency 
effects","internal_url":"https://www.academia.edu/29058735/A_computational_study_of_visual_working_memory_capacity_in_the_presence_of_saliency_effects","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512557,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512557/thumbnails/1.jpg","file_name":"1471-2202-12-s1-p64.pdf","download_url":"https://www.academia.edu/attachments/49512557/download_file","bulk_download_file_name":"A_computational_study_of_visual_working.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512557/1471-2202-12-s1-p64-libre.pdf?1476136406=\u0026response-content-disposition=attachment%3B+filename%3DA_computational_study_of_visual_working.pdf\u0026Expires=1740881336\u0026Signature=FKX1iNPdTGahG3OOwrERCucH1BDM0G4VC6-aXsWuwZS9q7PSELrmyhNvQ~m1kuQf3UsvCdi9BW6LTdg0cMF3ic6BMvBp-RzBp5uNB09jI05LhnwMlJitkq6bBgm-GPqiparFUxtumzJUWr1cVKpnJhcTXeJjrFBk5vRL0692WmJyY~yTokeNplFdRazXHI2-hTA5lC~nsPSQ7deTmns3RN-rerL2Mqnxehjbffjpv3yKXkddpivQlEi~j8XI-2Tzu4jHWBwwCA2Mydj9x7JK9xPViwzyVb2zXtz6o4M-FGoSKJxAo5bNIAp2oLKJJjcwJBk6uL6mVe5PqgS9crv4Ag__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058741"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058741/The_role_of_semantic_interference_in_limiting_memory_for_the_details_of_visual_scenes"><img alt="Research paper thumbnail of The role of semantic interference in limiting memory for the details of visual scenes" class="work-thumbnail" src="https://attachments.academia-assets.com/49512572/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058741/The_role_of_semantic_interference_in_limiting_memory_for_the_details_of_visual_scenes">The role of semantic interference in limiting memory for the details of visual scenes</a></div><div class="wp-workCard_item"><span>Frontiers in Psychology</span><span>, 2011</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Many studies suggest a large capacity memory for briefly presented pictures of whole scenes. At t...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Many studies suggest a large capacity memory for briefly presented pictures of whole scenes. At the same time, visual working memory (WM) of scene elements is limited to only a few items. We examined the role of retroactive interference in limiting memory for visual details. 
Participants viewed a scene for 5 s and then, after a short delay containing either a blank screen or 10 distracter scenes, answered questions about the location, color, and identity of objects in the scene. We found that the influence of the distracters depended on whether they were from a similar semantic domain, such as &quot;kitchen&quot; or &quot;airport.&quot; Increasing the number of similar scenes reduced, and eventually eliminated, memory for scene details. Although scene memory was firmly established over the initial study period, this memory was fragile and susceptible to interference. This may help to explain the discrepancy in the literature between studies showing limited visual WM and those showing a large capacity memory for scenes.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="378b63b338acdb161aac59ba19d3d4ab" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512572,&quot;asset_id&quot;:29058741,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512572/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058741"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058741"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058741; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058741]").text(description); $(".js-view-count[data-work-id=29058741]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058741; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058741']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "378b63b338acdb161aac59ba19d3d4ab" } } $('.js-work-strip[data-work-id=29058741]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: 
{"id":29058741,"title":"The role of semantic interference in limiting memory for the details of visual scenes","internal_url":"https://www.academia.edu/29058741/The_role_of_semantic_interference_in_limiting_memory_for_the_details_of_visual_scenes","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512572,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512572/thumbnails/1.jpg","file_name":"The_Role_of_Semantic_Interference_in_Lim20161010-27090-1qcx9sh.pdf","download_url":"https://www.academia.edu/attachments/49512572/download_file","bulk_download_file_name":"The_role_of_semantic_interference_in_lim.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512572/The_Role_of_Semantic_Interference_in_Lim20161010-27090-1qcx9sh-libre.pdf?1476136399=\u0026response-content-disposition=attachment%3B+filename%3DThe_role_of_semantic_interference_in_lim.pdf\u0026Expires=1740881336\u0026Signature=ABFeM~1CoVEyTa65ZOfPzZNwT36e8ZlAKJBTxlAkjv9jG732a34ntPuLikkSMuOdsZvGE3BoL4FruKtafXWdFB4MXbcGgV373czh-K~UV572otmYDEQggNNZVu07S6LXj25BzTMiKpgUef5dlfkQyTj5DO5s0KbORWrwRyDO0zlj07WMk99e6opk8EqIgUNu~sbveWlyrZiBXHFpUCGKIpHRAiHiO1wNvLYu~PNIUjMNFtNmS2dLiqwxOdII8GlXioV0-wngmXSCHEvJ8vqiyGjuAqp~fc71xcCX~VxOln5FiI24rmULQxtA1TWY~Vk~XsIZlObPq1lLiHfthFodug__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058743"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058743/The_build_up_of_scene_memory_across_eye_movements"><img alt="Research paper thumbnail of The build up of scene memory across eye movements" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058743/The_build_up_of_scene_memory_across_eye_movements">The build up of scene memory across eye movements</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2010</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058743"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058743"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058743; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + 

Emotion modulates eye movement patterns and subsequent memory for the gist and details of movie scenes
Journal of Vision, 2014

A basic question in vision research regards where people look in complex scenes and how this influences their performance in various tasks. Previous studies with static images have demonstrated a close link between where people look and what they remember. Here, we examined the pattern of eye movements while participants watched neutral and emotional clips from Hollywood-style movies. Participants answered multiple-choice memory questions about visual and auditory scene details immediately after viewing one-minute neutral or emotional movie clips. Fixations were more narrowly focused for emotional clips, and immediate memory for object details was worse than for matched neutral scenes, implying preferential attention to emotional events. Although we found the expected correlation between where people looked and what they remembered for neutral clips, this relationship broke down for emotional clips. When participants were subsequently shown key frames (static images) extracted from the movie clips, with the presentation duration of the target objects (TOs) corresponding to the multiple-choice questions matched, and the earlier questions were repeated, more fixations were observed on the TOs and memory performance improved significantly, confirming that emotion modulates the relationship between gaze position and memory performance. Finally, in a long-term memory test, old/new recognition performance was significantly better for emotional scenes than for neutral scenes. Overall, these results are consistent with the hypothesis that emotional content draws eye fixations and strengthens memory for the scene gist while weakening encoding of peripheral scene details.
id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058746]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058746,"title":"Emotion modulates eye movement patterns and subsequent memory for the gist and details of movie scenes","internal_url":"https://www.academia.edu/29058746/Emotion_modulates_eye_movement_patterns_and_subsequent_memory_for_the_gist_and_details_of_movie_scenes","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058750"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058750/Accumulation_and_persistence_of_memory_for_natural_scenes"><img alt="Research paper thumbnail of Accumulation and persistence of memory for natural scenes" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058750/Accumulation_and_persistence_of_memory_for_natural_scenes">Accumulation and persistence of memory for natural scenes</a></div><div class="wp-workCard_item"><span>Journal of Vision</span><span>, 2006</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Although our visual experience of the world is rich and full of detail, visual short-term memory ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Although our visual experience of the world is rich and full of detail, visual short-term memory (VSTM) can retain only about four objects at a time. Long-term memory (LTM) for pictures lasts longer but may rely on abstract gist, raising the question of how it is possible to remember details of natural scenes. We studied the accumulation and persistence of memory for pictures shown for 1-20 s. Performance in answering questions about the details of pictures increased linearly as a function of the total time that the scene was viewed. 
Similar gains in memory were found for items of central and marginal interest. No loss of memory was found for picture detail over a 60-s interval, even when observers performed a VSTM or reading task during the delay. Together these results suggest that our rich phenomenological experience of a detailed scene reflects the maintenance in memory of useful information about previous fixations rather than the limited capacity of VSTM.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058750"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058750"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058750; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058750]").text(description); $(".js-view-count[data-work-id=29058750]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058750; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058750']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058750]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058750,"title":"Accumulation and persistence of memory for natural scenes","internal_url":"https://www.academia.edu/29058750/Accumulation_and_persistence_of_memory_for_natural_scenes","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058753"><div class="profile--work_thumbnail hidden-xs"><a 
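The linear accumulation result above lends itself to a simple worked illustration. The sketch below is not from the paper; it is a minimal toy model, under the assumption that detail memory grows linearly with total viewing time (regardless of how that time is split across presentations) and does not decay over the retention interval. The slope, chance level, and ceiling are hypothetical values chosen for illustration.

# Toy model of accumulating scene memory: performance grows linearly with
# total viewing time and is additive across separate presentations.
# All numeric parameters are hypothetical, not estimates from the paper.

def detail_memory(viewing_times_s, chance=0.25, slope=0.035, ceiling=0.95):
    """Predicted proportion correct after one or more viewings of a scene.

    viewing_times_s: duration of each presentation, in seconds. Only the
    sum matters in this account: memory accumulated in earlier views is
    assumed to persist, so 10 s + 5 s should equal a single 15-s view.
    """
    total = sum(viewing_times_s)
    return min(ceiling, chance + slope * total)

if __name__ == "__main__":
    # Additivity across presentations under this toy account:
    print(detail_memory([10, 5]))  # 0.775
    print(detail_memory([15]))     # 0.775, same total exposure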
class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058753/Accumulating_and_remembering_the_details_of_neutral_and_emotional_natural_scenes"><img alt="Research paper thumbnail of Accumulating and remembering the details of neutral and emotional natural scenes" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058753/Accumulating_and_remembering_the_details_of_neutral_and_emotional_natural_scenes">Accumulating and remembering the details of neutral and emotional natural scenes</a></div><div class="wp-workCard_item"><span>Perception</span><span>, 2010</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In contrast to our rich sensory experience with complex scenes in everyday life, the capacity of ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In contrast to our rich sensory experience with complex scenes in everyday life, the capacity of visual working memory is thought to be quite limited. Here our memory has been examined for the details of naturalistic scenes as a function of display duration, emotional valence of the scene, and delay before test. Individual differences in working memory and long-term memory for pictorial scenes were examined in experiment 1. The accumulation of memory for emotional scenes and the retention of these details in long-term memory were investigated in experiment 2. Although there were large individual differences in performance, memory for scene details generally exceeded the traditional working memory limit within a few seconds. Information about positive scenes was learned most quickly, while negative scenes showed the worst memory for details. 
The overall pattern of results was consistent with the idea that both short-term and long-term representations are mixed together in a medium-term &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39;online&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39; memory for scenes.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058753"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058753"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058753; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058753]").text(description); $(".js-view-count[data-work-id=29058753]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058753; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058753']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058753]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058753,"title":"Accumulating and remembering the details of neutral and emotional natural scenes","internal_url":"https://www.academia.edu/29058753/Accumulating_and_remembering_the_details_of_neutral_and_emotional_natural_scenes","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058754"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" 
href="https://www.academia.edu/29058754/Pictures_in_mind_Initial_encoding_of_object_properties_varies_with_the_realism_of_the_scene_stimulus"><img alt="Research paper thumbnail of Pictures in mind: Initial encoding of object properties varies with the realism of the scene stimulus" class="work-thumbnail" src="https://attachments.academia-assets.com/49512578/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058754/Pictures_in_mind_Initial_encoding_of_object_properties_varies_with_the_realism_of_the_scene_stimulus">Pictures in mind: Initial encoding of object properties varies with the realism of the scene stimulus</a></div><div class="wp-workCard_item"><span>Perception</span><span>, 2007</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="0225b05e60f8e3ed5d7b567fbca00475" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512578,&quot;asset_id&quot;:29058754,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512578/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058754"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058754"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058754; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058754]").text(description); $(".js-view-count[data-work-id=29058754]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058754; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058754']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "0225b05e60f8e3ed5d7b567fbca00475" } } 
$('.js-work-strip[data-work-id=29058754]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058754,"title":"Pictures in mind: Initial encoding of object properties varies with the realism of the scene stimulus","internal_url":"https://www.academia.edu/29058754/Pictures_in_mind_Initial_encoding_of_object_properties_varies_with_the_realism_of_the_scene_stimulus","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512578,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512578/thumbnails/1.jpg","file_name":"Pictures_in_mind_Initial_encoding_of_obj20161010-18911-qgb7g8.pdf","download_url":"https://www.academia.edu/attachments/49512578/download_file","bulk_download_file_name":"Pictures_in_mind_Initial_encoding_of_obj.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512578/Pictures_in_mind_Initial_encoding_of_obj20161010-18911-qgb7g8-libre.pdf?1476136392=\u0026response-content-disposition=attachment%3B+filename%3DPictures_in_mind_Initial_encoding_of_obj.pdf\u0026Expires=1740881336\u0026Signature=UGdZjOYT7wQhAWc2MYPDda9~j-sCxUwtjs5v16SRt3hx95AzW4-xgsyK1oXkfVSDFTEH1nuyaHELkMczt9ko-YC6ymvRBwxWpZU8FvBlL9OuagXeqI2Pvk17XKAdRTcF78SXhyrO8Tdh9JRZzPe75rck-ftNb2KGn2-s~hNtNo0zXbaV6yLOKWEt6YgbsaW5c1YyRi8NQhMX16JqB7-reMoLUjh9XF81aICZ0p4IzjzlIkIjLcPteQ2nAx1ONmCl-9nDjQm-kPkmpDJ~Tjr8Fd~TaDRO8Ux-3Lfy-mLjH-WnzY29q89Meav0dnDzj4xi1B2Efr1nf5y0Gvb-frV6Vg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058792"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058792/Accumulation_of_Visual_Memory_for_Natural_Scenes_A_Medium_Term_Memory"><img alt="Research paper thumbnail of Accumulation of Visual Memory for Natural Scenes: A Medium-Term Memory?" class="work-thumbnail" src="https://attachments.academia-assets.com/49512586/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058792/Accumulation_of_Visual_Memory_for_Natural_Scenes_A_Medium_Term_Memory">Accumulation of Visual Memory for Natural Scenes: A Medium-Term Memory?</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We studied visual memory for objects in natural scenes. Participants viewed photographs or pictur...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We studied visual memory for objects in natural scenes. 
Participants viewed photographs or pictures for periods of 1 to 20 seconds, and were then asked questions about the color, location, and identity of objects that they had seen, as well as given recognition tests. Performance improved as a function of display duration for all question types and for objects of both central and peripheral interest. On some trials, previously viewed stimuli, which had been shown 4 to 6 trials earlier with no subsequent memory test, were repeated to see if performance continued to improve across separate presentations. There was no loss of information across retests, such that memory for a display shown for 10 and then 5 seconds was equal to performance after a single trial of 15 seconds. Overall, memory performance exceeded the capacity and duration limits of short-term or working memory, supporting the idea of a medium-term visual scene memory.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6b25d52c5a0cf6b73777804aedf047fc" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512586,&quot;asset_id&quot;:29058792,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512586/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058792"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058792"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058792; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058792]").text(description); $(".js-view-count[data-work-id=29058792]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058792; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058792']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6b25d52c5a0cf6b73777804aedf047fc" } } 
Attention and Visual Awareness

Implicit Attentional Selection of Bound Visual Features. Neuron, 2005.
Abstract: Traditionally, research on visual attention has been focused on the processes involved in conscious, explicit selection of task-relevant sensory input. Recently, however, it has been shown that attending to a specific feature of an object automatically increases neural sensitivity to this feature throughout the visual field. Here we show that directing attention to a specific color of an object results in attentional modulation of the processing of task-irrelevant and not consciously perceived motion signals that are spatiotemporally associated with this color throughout the visual field. Such implicit cross-feature spreading of attention takes place according to the veridical physical associations between the color and motion signals, even under special circumstances when they are perceptually misbound. These results imply that the units of implicit attentional selection are spatiotemporally colocalized feature clusters that are automatically bound throughout the visual field.
Features","internal_url":"https://www.academia.edu/29058752/Implicit_Attentional_Selection_of_Bound_Visual_Features","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="5309668"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/5309668/The_role_of_attention_in_central_and_peripheral_motion_integration"><img alt="Research paper thumbnail of The role of attention in central and peripheral motion integration" class="work-thumbnail" src="https://attachments.academia-assets.com/49352402/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/5309668/The_role_of_attention_in_central_and_peripheral_motion_integration">The role of attention in central and peripheral motion integration</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://york.academia.edu/AurelioBruno">Aurelio Bruno</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Attention has been shown to modulate visual processing in a wide variety of tasks. We tested the ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Attention has been shown to modulate visual processing in a wide variety of tasks. We tested the influence of attention on the temporal integration of motion for both central and peripherally viewed targets (6°· 6°). Consistent with previous results, motion sensitivity for a brief motion signal (70-3500 ms) embedded in noise (10 s) increased as a function of motion duration up to a critical duration of about 1.5 s. Summation times for centrally and peripherally viewed targets were similar. An effect of eccentricity was found, however, in a double-motion task, in which two brief (150 ms) motion signals were presented with varying delays (0-7 s) of random noise between the two signals. Specifically, the maximum delay between the two signals that still supported temporal summation (summation constant) was about three times longer for centrally viewed targets (3.5-4.5 s versus 1.5-2 s). We investigated the role of spatial attention in the double-motion task by adding a concurrent color contrast discrimination task. The addition of the concurrent task dramatically reduced differences in the summation constant for central and peripheral targets, without reducing overall motion sensitivity. 
Thus, attention appears to specifically modulate temporal summation, suggesting that the long integration times found for motion coherence are mediated by attention.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="d0bf425d06556578f7001896837c832c" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49352402,&quot;asset_id&quot;:5309668,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49352402/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="5309668"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="5309668"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5309668; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5309668]").text(description); $(".js-view-count[data-work-id=5309668]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5309668; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='5309668']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "d0bf425d06556578f7001896837c832c" } } $('.js-work-strip[data-work-id=5309668]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":5309668,"title":"The role of attention in central and peripheral motion integration","internal_url":"https://www.academia.edu/5309668/The_role_of_attention_in_central_and_peripheral_motion_integration","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
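As a concrete illustration of the kind of stimulus used in these motion-integration studies, the sketch below generates frames of a random-dot kinematogram in which a given proportion of dots (the coherence level) share one direction of motion while the remainder move randomly. This is a generic reconstruction for illustration only, not the authors' stimulus code; the field size, dot count, speed, and coherence values are all hypothetical.

import numpy as np

def rdk_frames(n_frames, n_dots=100, coherence=0.5, speed=0.02,
               direction=0.0, field=1.0, rng=None):
    """Generate dot positions for a random-dot kinematogram.

    A `coherence` fraction of dots steps in `direction` (radians) on
    every frame; the rest step in independent random directions. Dots
    wrap around the edges of a square field of side `field`. Parameter
    values are illustrative, not taken from the published experiments.
    """
    rng = np.random.default_rng(rng)
    pos = rng.uniform(0, field, size=(n_dots, 2))
    n_coh = int(round(coherence * n_dots))
    frames = []
    for _ in range(n_frames):
        # Noise dots get fresh random directions each frame;
        # the first n_coh dots always share the signal direction.
        theta = rng.uniform(0, 2 * np.pi, size=n_dots)
        theta[:n_coh] = direction
        step = speed * np.column_stack([np.cos(theta), np.sin(theta)])
        pos = (pos + step) % field  # wrap at the field edges
        frames.append(pos.copy())
    return frames

frames = rdk_frames(n_frames=60, coherence=0.5)  # roughly 1 s at 60 Hz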
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49352402,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49352402/thumbnails/1.jpg","file_name":"The_role_of_attention_in_central_and_per20161004-5636-wc6qlf.pdf","download_url":"https://www.academia.edu/attachments/49352402/download_file","bulk_download_file_name":"The_role_of_attention_in_central_and_per.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49352402/The_role_of_attention_in_central_and_per20161004-5636-wc6qlf-libre.pdf?1475607812=\u0026response-content-disposition=attachment%3B+filename%3DThe_role_of_attention_in_central_and_per.pdf\u0026Expires=1740881336\u0026Signature=KAAcVYefFBok9u9yovZyy8dHf1ZdnIMMqYde9ICpXCIM4lLj~42P6gPqhkjG9V08-dvxQwuxZynqGt0Ci0IQwwmiTdpdiSwMOfpO1KCGgeAvYsG2ZqzrKIJGbGrm5vX4fmgBpSQHTXyr7B9qw-tQ4h3KdNHYmGJxpoL757D3suMW1G8~gWuPvA5xY2rbRQ4~MltC12Ni0EKBHJU0WQ-ogAYqI6p-UE5Rvx8V6nMtdSPcbzDR4~PsgjlrGpMJdHHhV1SRey8TfbWzuTJ6ubO64XVGne41qk2QwrlvVd3Sk~TL0Jlfm915iAH6NojIJVfgbv~86yc5N5S1BnFDe~-pSQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058740"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058740/Intercepting_the_First_Pass_Rapid_Categorization_is_Suppressed_for_Unseen_Stimuli"><img alt="Research paper thumbnail of Intercepting the First Pass: Rapid Categorization is Suppressed for Unseen Stimuli" class="work-thumbnail" src="https://attachments.academia-assets.com/49512562/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058740/Intercepting_the_First_Pass_Rapid_Categorization_is_Suppressed_for_Unseen_Stimuli">Intercepting the First Pass: Rapid Categorization is Suppressed for Unseen Stimuli</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/EmanueleOlivetti">Emanuele Olivetti</a></span></div><div class="wp-workCard_item"><span>Frontiers in Psychology</span><span>, 2011</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="77414150de1f60ca915dfd1b4cb31145" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512562,&quot;asset_id&quot;:29058740,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512562/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058740"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa 
fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058740"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058740; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058740]").text(description); $(".js-view-count[data-work-id=29058740]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058740; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058740']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "77414150de1f60ca915dfd1b4cb31145" } } $('.js-work-strip[data-work-id=29058740]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058740,"title":"Intercepting the First Pass: Rapid Categorization is Suppressed for Unseen Stimuli","internal_url":"https://www.academia.edu/29058740/Intercepting_the_First_Pass_Rapid_Categorization_is_Suppressed_for_Unseen_Stimuli","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512562,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512562/thumbnails/1.jpg","file_name":"fpsyg-02-00198.pdf","download_url":"https://www.academia.edu/attachments/49512562/download_file","bulk_download_file_name":"Intercepting_the_First_Pass_Rapid_Catego.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512562/fpsyg-02-00198-libre.pdf?1476136414=\u0026response-content-disposition=attachment%3B+filename%3DIntercepting_the_First_Pass_Rapid_Catego.pdf\u0026Expires=1740881336\u0026Signature=QDfcDBHRnAFKZyCfVBdflO16Szx~EIQflB3CWGv7a6UIdA2N3zO-raDA3aNajbL1ISQuRd1xl1KEcGdtydbHE0C9oqHX78lN68tlH1JiGaSLiVeVgUD2m~P3ikW4kTbJbfxRTTu0QmlHNxEalvKTpYh~~jHxO-UNLalp9dF4HVfScYROb3tKlqDrfEEiHPTo5Pq6FIMBSKpgyJu5EyCp7AXVAJNLys83QsMDAbPRYSmPI3kIQEMg594RlaM5CGdjP-pAkW8wi-ysSXJ~VqhHGbnKC9WF9Ro3M4O3f7aHaiXKgKnIhgnIf9YxgsEn9~AQhyziDhTH7MPnrVbIsLTVMA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); 
Unseen complex motion is modulated by attention and generates a visible aftereffect. Journal of Vision, 2011.
Abstract: The relationship between attention and awareness, and the processing of visual information outside of attention and awareness, remain controversial issues. We employed the motion aftereffect (MAE) illusion and continuous flash suppression (CFS) to study the behavioral effects of unseen and unattended visual motion. The main finding was that neither withdrawal of attention nor lack of visual awareness of the adaptors eliminated the formation of translational MAEs, spiral MAEs, or the interocular transfer of the MAE. However, no spiral MAE was generated when attention was diverted from the unseen spiral adaptors. Interestingly, all MAEs that arose in the absence of awareness or in the absence of attention were reduced in size. The pattern of results is consistent with suggestions that the magnitude of visual motion adaptation depends on both attention and awareness.
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512567,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512567/thumbnails/1.jpg","file_name":"Unseen_complex_motion_is_modulated_by_at20161010-27087-mvoz79.pdf","download_url":"https://www.academia.edu/attachments/49512567/download_file","bulk_download_file_name":"Unseen_complex_motion_is_modulated_by_at.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512567/Unseen_complex_motion_is_modulated_by_at20161010-27087-mvoz79-libre.pdf?1476136401=\u0026response-content-disposition=attachment%3B+filename%3DUnseen_complex_motion_is_modulated_by_at.pdf\u0026Expires=1740881336\u0026Signature=ggy6sXQe-cM3qLaI25DkYIPFwm15YmHxbQlhO-XfoYlgBdjBdSQ1jv4JpN9zCKG5lwv2ESrClQ-Yw7dtdrehIvl~EQgMLhus07scbqzHWogRyHmtc9-Xe-26KaTkHPXDh0pcL3CvJSta-jbnIe6xQF0Qe8h8hiPDvDl9p2xdzuJWjHp9~1c-F6PleMZEB1YSUqsPyGm6cMLiyHpnA5y2twE6TEk9olHDnvWg1o1fypHyfOANY4hybSh3gpD1NSTLejGGS4vezarfRyU7~sCSggqc6DnrDbxRtWfynDTdxBEbYbqxzB4ZapoRXUOwT0JrgQysM4SJ5GFSojXvXUhvog__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058756"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058756/Non_Conscious_Processing_of_Motion_Coherence_Can_Boost_Conscious_Access"><img alt="Research paper thumbnail of Non-Conscious Processing of Motion Coherence Can Boost Conscious Access" class="work-thumbnail" src="https://attachments.academia-assets.com/49512580/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058756/Non_Conscious_Processing_of_Motion_Coherence_Can_Boost_Conscious_Access">Non-Conscious Processing of Motion Coherence Can Boost Conscious Access</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/AngelikaLingnau">Angelika Lingnau</a></span></div><div class="wp-workCard_item"><span>PLoS ONE</span><span>, 2013</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Research on the scope and limits of non-conscious vision can advance our understanding of the fun...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Research on the scope and limits of non-conscious vision can advance our understanding of the functional and neural underpinnings of visual awareness. Here we investigated whether distributed local features can be bound, outside of awareness, into coherent patterns. 
We used continuous flash suppression (CFS) to create interocular suppression, and thus lack of awareness, for a moving dot stimulus that varied in terms of coherence with an overall pattern (radial flow). Our results demonstrate that for radial motion, coherence favors the detection of patterns of moving dots even under interocular suppression. Coherence caused dots to break through the masks more often: this indicates that the visual system was able to integrate low-level motion signals into a coherent pattern outside of visual awareness. In contrast, in an experiment using meaningful or scrambled biological motion we did not observe any increase in the sensitivity of detection for meaningful patterns. Overall, our results are in agreement with previous studies on face processing and with the hypothesis that certain features are spatiotemporally bound into coherent patterns even outside of attention or awareness. Citation: Kaunitz L, Fracasso A, Lingnau A, Melcher D (2013) Non-Conscious Processing of Motion Coherence Can Boost Conscious Access. PLoS ONE 8(4): e60787.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="b8727acfebd696e200045642e2220e9f" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512580,&quot;asset_id&quot;:29058756,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512580/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058756"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058756"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058756; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058756]").text(description); $(".js-view-count[data-work-id=29058756]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058756; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058756']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = 
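Breakthrough-from-suppression studies of this kind typically quantify "sensitivity of detection" with a signal detection measure such as d', computed from hit and false-alarm rates. The snippet below shows that standard computation on invented example rates; it is a generic illustration of the measure, not an analysis or data taken from the paper.

from statistics import NormalDist

def d_prime(hit_rate, fa_rate, n_trials):
    """Signal-detection sensitivity d' = z(H) - z(F).

    Rates of exactly 0 or 1 are nudged by half a trial (a common
    correction) so the inverse-normal transform stays finite.
    """
    def clamp(p):
        return min(max(p, 0.5 / n_trials), 1 - 0.5 / n_trials)
    z = NormalDist().inv_cdf
    return z(clamp(hit_rate)) - z(clamp(fa_rate))

# Hypothetical illustration: detection of coherent vs. scrambled patterns
# under suppression (these numbers are invented, not the paper's data).
print(d_prime(0.80, 0.20, n_trials=100))  # coherent  -> ~1.68
print(d_prime(0.55, 0.20, n_trials=100))  # scrambled -> ~0.97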
Subthreshold features of visual objects: Unseen but not unbound
Vision Research, 2006

The object is a basic unit that is thought to organize the way in which we perceive and think about the world. According to theories of object-based attention, perception of unified objects depends on the binding together of the disparate features of each object via attention.
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512564,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512564/thumbnails/1.jpg","file_name":"Subthreshold_features_of_visual_objects_20161010-18905-1bpm82g.pdf","download_url":"https://www.academia.edu/attachments/49512564/download_file","bulk_download_file_name":"Subthreshold_features_of_visual_objects.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512564/Subthreshold_features_of_visual_objects_20161010-18905-1bpm82g-libre.pdf?1476136404=\u0026response-content-disposition=attachment%3B+filename%3DSubthreshold_features_of_visual_objects.pdf\u0026Expires=1740881336\u0026Signature=BgAYS5MstyEBZ-jLyedT9YAdNvi9s9kvvtlaeqQiRjhsxoh1LUY8CkRFlGrsadoaLKRJNu4ZdgdF2X5~3NqV~vr4qTWQkq5yNxK6jfOgopixeYfjrCBZMTBUZsw~E2g7g2kELJYeNkLRfzRaDljC4NEJHw4MD1vppGT7Dtgc70DkjlQsue33JWnyT8UZql~mHyJ1DXTsaSvP4XsA1dAhTsPDNz4bvX3EvooPmVvkmI1kCzHzAnwpVWsIHBuVKT5xLmBpsdaUWtWZWzxaP~smvAdvpSn8wjzcR7PG9kY9xU5W7LXE6CyiJFY-n1OpF~HcK~m7NXTEkSCGIUnZdI3B6g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058768"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/29058768/Waves_of_visibility_probing_the_depth_of_inter_ocular_suppression_with_transient_and_sustained_targets"><img alt="Research paper thumbnail of Waves of visibility: probing the depth of inter-ocular suppression with transient and sustained targets" class="work-thumbnail" src="https://attachments.academia-assets.com/49512565/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/29058768/Waves_of_visibility_probing_the_depth_of_inter_ocular_suppression_with_transient_and_sustained_targets">Waves of visibility: probing the depth of inter-ocular suppression with transient and sustained targets</a></div><div class="wp-workCard_item"><span>Frontiers in Psychology</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to study non-conscious visual processing, researchers render otherwise consciously perce...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to study non-conscious visual processing, researchers render otherwise consciously perceived images into invisible stimuli. Through the years, several psychophysical techniques have been developed for this purpose. Yet the comparison of experimental results across techniques remains a difficult task as the depth of suppression depends on the interactions between the type of stimuli and the suppression methods employed. This poses a limit to the inferences that researchers make about the extent of non-conscious processes. 
We investigated the mechanisms underlying inter-ocular suppression during continuous flash suppression (CFS) and dichoptic visual masking using a transient onset target stimulus and a variety of stimulus/mask temporal manipulations. We show that target duration, timing of target onset, and mask frequency are key aspects of inter-ocular suppression during CFS with transient targets. The differences between our results and sustained target CFS studies suggest that two distinct mechanisms are involved in the detection of transient and prolonged target stimuli during CFS. Our results provide insight into the dynamics of CFS together with evidence for similarities between transient target CFS and dichoptic visual masking.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="894c4267a68fa5ed3d73cd93b1bc74fb" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:49512565,&quot;asset_id&quot;:29058768,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/49512565/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058768"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058768"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058768; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058768]").text(description); $(".js-view-count[data-work-id=29058768]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058768; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058768']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "894c4267a68fa5ed3d73cd93b1bc74fb" } } $('.js-work-strip[data-work-id=29058768]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058768,"title":"Waves of 
Strength and coherence of binocular rivalry depends on shared stimulus complexity
Vision Research, 2007

Presenting incompatible images to the two eyes results in alternations of conscious perception, a phenomenon known as binocular rivalry. We examined rivalry using either simple stimuli (oriented gratings) or coherent visual objects (faces, houses, etc.). Two rivalry characteristics were measured: depth of rivalry suppression and coherence of alternations. Rivalry between coherent visual objects exhibits deep suppression and coherent alternations, whereas rivalry between gratings exhibits shallow suppression and piecemeal rivalry. Interestingly, rivalry between a simple and a complex stimulus displays the same characteristics (shallow and piecemeal) as rivalry between two simple stimuli. Thus, complex stimuli fail to rival globally unless the fellow stimulus is also global. We also conducted a face adaptation experiment. Adaptation to rivaling faces improved subsequent face discrimination (as expected), but adaptation to a rivaling face/grating pair did not. To explain this, we suggest rivalry ...
complexity","internal_url":"https://www.academia.edu/29058770/Strength_and_coherence_of_binocular_rivalry_depends_on_shared_stimulus_complexity","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":49512581,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/49512581/thumbnails/1.jpg","file_name":"Strength_and_coherence_of_binocular_riva20161010-18908-1vekkph.pdf","download_url":"https://www.academia.edu/attachments/49512581/download_file","bulk_download_file_name":"Strength_and_coherence_of_binocular_riva.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/49512581/Strength_and_coherence_of_binocular_riva20161010-18908-1vekkph-libre.pdf?1476136393=\u0026response-content-disposition=attachment%3B+filename%3DStrength_and_coherence_of_binocular_riva.pdf\u0026Expires=1740881336\u0026Signature=MZ06X6gPGUoVKqQbckr7ilBLUsiXDGaqmRt~-4edp80qFGhW8MYxCLYNSh90EAYVxbcqr8DYS~dR~hCRlJANkz0Saz7bb-3QAg0~P6XWnfgDppuSKqY7Qsvoy85bB72k-zI1zP6rzORNqrfmEXz1wcUPS-7PkF0~2CC-m8iTKn8o44Q~Ve8LmTfXE4N61mpK583nditGFx5lWuih4Z6xHnenDUG1Q4ak9bbJEXmYH10eKdCRwc9sQ-Zgjv8vMQPsek8jjq6Ql932AEqA3JB2OwZw6zYcXIGCxlZvQ-yX~C4aFD6NlNKLff3MHykn8Ggo5o4Cx9xHvGNlPQUg2PPhxg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="29058784"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058784/Continuous_flash_suppression_effectiveness_depends_on_mask_temporal_frequency"><img alt="Research paper thumbnail of Continuous flash suppression effectiveness depends on mask temporal frequency" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058784/Continuous_flash_suppression_effectiveness_depends_on_mask_temporal_frequency">Continuous flash suppression effectiveness depends on mask temporal frequency</a></div><div class="wp-workCard_item"><span>Journal of Vision</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">A variant of binocular rivalry called Continuous Flash Suppression (CFS) (Tsuchiya &amp;amp;amp;amp;a...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">A variant of binocular rivalry called Continuous Flash Suppression (CFS) (Tsuchiya &amp;amp;amp;amp;amp;amp;amp;amp;amp; Koch, 2005) has become a popular tool for investigating visual processing outside of conscious awareness (Yang, Brascamp, Kang, &amp;amp;amp;amp;amp;amp;amp;amp;amp; Blake, 2014). 
In a CFS paradigm, a series of different Mondrian patterns is flashed to one eye at a steady rate, suppressing awareness of the image presented to the other eye (Tsuchiya, Koch, Gilroy, &amp;amp;amp;amp;amp;amp;amp;amp;amp; Blake, 2006). In most studies using CFS the temporal frequency for the mask images is set to 10 (Tsuchiya &amp;amp;amp;amp;amp;amp;amp;amp;amp; Koch, 2005; Tsuchiya et al., 2006) or 20 (Jiang et al., 2009) Hz. To date, little is known about the precise relationship between masking effectiveness and temporal masking frequency. Given the role of temporal factors in many theories of visual awareness, such as phase coupling of neural oscillations across brain regions or re-entrant processing, we investigated the suppression effectiveness of a wide range of masking frequencies (0-32Hz). In a breakthrough CFS paradigm, participants reported whether an image (a face or house) was presented on each trial while Mondrian-like textures were presented as masks. In condition 1, trials with all different frequencies occurred in random order while in condition 2, trials with the same masking frequency were grouped in blocks.   We found that the response times differed dramatically between temporal masking frequencies, with mask effectiveness following a log-normal curve peaking around 6Hz in both conditions. The static mask (0 Hz: traditional binocular rivalry) yielded similar breakthrough times as higher frequencies of CFS. In practical terms, these results show that, the 10 Hz/20 Hz frequencies used in most CFS studies may not be optimally effective. More generally, these findings support the idea that temporal factors play a critical role in perceptual awareness. Meeting abstract presented at VSS 2015.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058784"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058784"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058784; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058784]").text(description); $(".js-view-count[data-work-id=29058784]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058784; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058784']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
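The reported relationship between mask frequency and breakthrough time is a log-normal-shaped tuning curve peaking near 6 Hz. As a hedged illustration of how such a curve can be fit, the sketch below uses scipy's curve_fit on invented breakthrough times; the data values and exact parameterization are not from the study. The static 0 Hz condition is left out of the fit (log(0) is undefined) and would be compared separately, as in the abstract.

import numpy as np
from scipy.optimize import curve_fit

# Hypothetical mean breakthrough times (s) per mask frequency (Hz);
# values invented purely for illustration.
freq = np.array([1, 2, 4, 6, 8, 12, 16, 24, 32], dtype=float)
rt   = np.array([2.1, 2.9, 3.8, 4.2, 3.9, 3.2, 2.7, 2.3, 2.0])

def lognormal_curve(f, amp, mu, sigma, base):
    """Log-normal-shaped tuning curve over frequency; peaks at exp(mu)."""
    return base + amp * np.exp(-(np.log(f) - mu) ** 2 / (2 * sigma ** 2))

p0 = (2.0, np.log(6.0), 1.0, 2.0)      # start the search with a peak near 6 Hz
params, _ = curve_fit(lognormal_curve, freq, rt, p0=p0)
amp, mu, sigma, base = params
print(f"fitted peak frequency: {np.exp(mu):.1f} Hz")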
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=29058784]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":29058784,"title":"Continuous flash suppression effectiveness depends on mask temporal frequency","internal_url":"https://www.academia.edu/29058784/Continuous_flash_suppression_effectiveness_depends_on_mask_temporal_frequency","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="8625245" id="artneuroscience"><div class="js-work-strip profile--work_container" data-work-id="29058765"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/29058765/The_visual_system_as_a_constraint_on_the_survival_and_success_of_specific_artworks"><img alt="Research paper thumbnail of The visual system as a constraint on the survival and success of specific artworks" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/29058765/The_visual_system_as_a_constraint_on_the_survival_and_success_of_specific_artworks">The visual system as a constraint on the survival and success of specific artworks</a></div><div class="wp-workCard_item"><span>Spatial Vision</span><span>, 2008</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Why should vision science turn its gaze towards artworks? One possibility is that understanding v...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Why should vision science turn its gaze towards artworks? One possibility is that understanding visual processing might yield some fundamental insight into the nature of art. However, there are many examples of phenomena that can be seen - such as automobiles, clouds or leaves - but which are not explained in any deep sense by the properties of human visual perception. 
We examine one art historical question that might benefit from knowledge about the visual system: why do some artworks &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39;survive&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39; historically while others fade into the dustbin of time? One possible reason, suggested by studies of rapid visual categorization, is that some objects are recognized more quickly and easily than others and thus are less culturally specific in terms of pictorial representation. A second, related, explanation is that many artistic techniques use the eyes as a channel to evoke other senses, cognition, emotions and the motor system. &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39;Art&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39; is a social and historical construct - after all, the concept of &amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39;fine art&amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;amp;#39; was invented in the 18th century - and thus many aspects of artistic appreciation are specific to particular cultural and historical contexts. Some great works, however, may be adopted by successive generations because of an ability to appeal to a shared perceptual system.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="29058765"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="29058765"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29058765; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29058765]").text(description); $(".js-view-count[data-work-id=29058765]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29058765; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='29058765']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } 
"A moment's monument": the central vision of Italian sculptor Medardo Rosso (1858-1928)
Perception, 2003

One of the basic limitations on visual perception is that it is impossible, in any given moment, to see the world sharply and in full color beyond the central area of the visual field. This fact was popularized and brought to the attention of artists in the nineteenth century. To accurately represent the 'impression', or vision of a single glance, an artistic work should contain only a central area in focus surrounded by progressively greater blur. The work of the Italian sculptor Medardo Rosso (1858-1928) may be the first artistic representation of differences in central and peripheral acuity. Despite using the medium of sculpture, typically three-dimensional, Rosso conceived of his art as two-dimensional, because in a given moment it is possible to view a scene from only one viewpoint. The analysis of Rosso's photographs of his own sculptures emphasizes the areas of detail and relative blur, allowing a reconstruction of his point de vue unique: where the observer should stand when viewing that specific sculpture. We argue that the role of central and peripheral vision in subjective perception is critical to understanding the work of Rosso, aptly defined by critics as monument d'un instant.
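The perceptual claim here, that a single glance yields a sharp central region with progressively greater blur toward the periphery, can be visualized with simple image processing. The sketch below is purely illustrative and not a method from the paper: it blends progressively blurred copies of an image as a function of eccentricity from an assumed fixation point, with all parameter values chosen arbitrarily.

import numpy as np
from scipy.ndimage import gaussian_filter

def foveate(img, cx, cy, sigma_max=8.0, n_levels=6):
    """Blend progressively blurred copies of a grayscale image so that
    blur increases with distance from the fixation point (cx, cy)."""
    h, w = img.shape
    yy, xx = np.mgrid[0:h, 0:w]
    ecc = np.hypot(xx - cx, yy - cy)
    ecc = ecc / ecc.max()                       # eccentricity in [0, 1]
    base = img.astype(float)
    sigmas = np.linspace(0.0, sigma_max, n_levels)
    levels = [base if s == 0 else gaussian_filter(base, s) for s in sigmas]
    stack = np.stack(levels)                    # (n_levels, h, w)
    # Map each pixel's eccentricity to a fractional blur level and
    # interpolate between the two nearest blurred copies.
    idx = ecc * (n_levels - 1)
    lo = np.floor(idx).astype(int)
    hi = np.minimum(lo + 1, n_levels - 1)
    frac = idx - lo
    rows, cols = np.indices(img.shape)
    return (1 - frac) * stack[lo, rows, cols] + frac * stack[hi, rows, cols]

# Example: foveate a random test image at its center.
img = np.random.rand(128, 128)
out = foveate(img, cx=64, cy=64)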
Perception of emotion in abstract artworks
Progress in Brain Research, 2013

There is a long-standing and fundamental debate regarding how emotion can be expressed by fine art. Some artists and theorists have claimed that certain features of paintings, such as color, line, form, and composition, can consistently express an "objective" emotion, while others have argued that emotion perception is subjective and depends more on the expertise of the observer. Here, we discuss two studies in which we found evidence for consistency in observer ratings of emotion for abstract artworks. We developed a stimulus set of abstract art images to test emotional priming, both between different painting images and between paintings and faces. The ratings were also used in a computational vision analysis of the visual features underlying emotion expression. Overall, these findings suggest that there is a strong bottom-up and objective aspect to the perception of emotion in abstract artworks that may tap into basic visual mechanisms.
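The computational vision analysis mentioned here relates image features to emotion ratings, but the abstract does not list the features used. As a generic sketch of that kind of analysis, the code below extracts two toy global features and correlates each with ratings; the features, images, and rating scale are all invented for illustration and are not the study's.

import numpy as np
from scipy.stats import pearsonr

def simple_features(img_rgb):
    """Two toy global features: a colorfulness proxy (mean max-min
    channel spread) and edge density (mean gradient magnitude of
    the luminance channel)."""
    img = img_rgb.astype(float) / 255.0
    colorfulness = (img.max(axis=2) - img.min(axis=2)).mean()
    lum = img @ np.array([0.299, 0.587, 0.114])
    gy, gx = np.gradient(lum)
    edge_density = np.hypot(gx, gy).mean()
    return colorfulness, edge_density

# Hypothetical data: 20 random "paintings" with invented valence ratings.
rng = np.random.default_rng(1)
images = [rng.integers(0, 256, size=(64, 64, 3), dtype=np.uint8)
          for _ in range(20)]
ratings = rng.uniform(1, 9, size=20)             # e.g. a 1-9 valence scale

feats = np.array([simple_features(im) for im in images])    # (20, 2)
for name, col in zip(["colorfulness", "edge density"], feats.T):
    r, p = pearsonr(col, ratings)
    print(f"{name}: r = {r:+.2f}, p = {p:.2f}")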
The Influence of Art Expertise and Training on Emotion and Preference Ratings for Representational and Abstract Artworks
PLoS ONE, 2015

Across cultures and throughout recorded history, humans have produced visual art. This raises the question of why people report such an emotional response to artworks and find some works more beautiful or compelling than others. In the current study we investigated the interplay between art expertise and emotional and preference judgments. Sixty participants (40 novices, 20 art experts) rated a set of 150 abstract artworks and portraits on two occasions: in a laboratory setting and in a museum. Before commencing their second session, half of the art novices received a brief training on stylistic and art-historical aspects of abstract art and portraiture. Results showed that art experts rated the artworks higher than novices on aesthetic facets (beauty and wanting), but no group differences were observed on affective evaluations (valence and arousal). The training session had a small effect on ratings of preference compared to the non-trained group of novices. Overall, these fi...
href="https://www.academia.edu/37510916/The_effect_of_linguistic_and_visual_salience_in_visual_world_studies"><img alt="Research paper thumbnail of The effect of linguistic and visual salience in visual world studies" class="work-thumbnail" src="https://attachments.academia-assets.com/57483099/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/37510916/The_effect_of_linguistic_and_visual_salience_in_visual_world_studies">The effect of linguistic and visual salience in visual world studies</a></div><div class="wp-workCard_item"><span>Frontiers in Psychology</span><span>, 2014</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Research using the visual world paradigm has demonstrated that visual input has a rapid effect on...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Research using the visual world paradigm has demonstrated that visual input has a rapid effect on language interpretation tasks such as reference resolution and, conversely, that linguistic material-including verbs, prepositions and adjectives-can influence fixations to potential referents. More recent research has started to explore how this effect of linguistic input on fixations is mediated by properties of the visual stimulus, in particular by visual salience. In the present study we further explored the role of salience in the visual world paradigm manipulating language-driven salience and visual salience. Specifically, we tested how linguistic salience (i.e., the greater accessibility of linguistically introduced entities) and visual salience (bottom-up attention grabbing visual aspects) interact. We recorded participants&#39; eye-movements during a MapTask, asking them to look from landmark to landmark displayed upon a map while hearing direction-giving instructions. The landmarks were of comparable size and color, except in the Visual Salience condition, in which one landmark had been made more visually salient. In the Linguistic Salience conditions, the instructions included references to an object not on the map. Response times and fixations were recorded. Visual Salience influenced the time course of fixations at both the beginning and the end of the trial but did not show a significant effect on response times. Linguistic Salience reduced response times and increased fixations to landmarks when they were associated to a Linguistic Salient entity not present itself on the map. When the target landmark was both visually and linguistically salient, it was fixated longer, but fixations were quicker when the target item was linguistically salient only. Our results suggest that the two types of salience work in parallel and that linguistic salience affects fixations even when the entity is not visually present. The effect of linguistic and visual salience in visual world studies. Front. Psychol. 
5:176.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="174f52cb13d972b534ac60453f6303f8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:57483099,&quot;asset_id&quot;:37510916,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/57483099/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="37510916"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="37510916"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 37510916; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=37510916]").text(description); $(".js-view-count[data-work-id=37510916]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 37510916; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='37510916']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "174f52cb13d972b534ac60453f6303f8" } } $('.js-work-strip[data-work-id=37510916]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":37510916,"title":"The effect of linguistic and visual salience in visual world studies","internal_url":"https://www.academia.edu/37510916/The_effect_of_linguistic_and_visual_salience_in_visual_world_studies","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David 
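Visual world analyses of this kind typically summarize gaze as the proportion of trials fixating the target in successive time bins relative to a reference event (here, presumably instruction onset). The sketch below is a generic illustration of that summary, not the authors' pipeline; the data layout, timestamps, and bin size are assumptions.

import numpy as np

# Hypothetical per-fixation records: (trial, start_ms, end_ms, on_target),
# with times relative to the onset of the critical word.
fixations = [
    (0,   0, 180, False), (0, 180, 650, True),  (0, 650,  900, False),
    (1,   0, 300, False), (1, 300, 520, False), (1, 520, 1100, True),
]

def target_time_course(fixations, n_trials, t_max=1200, bin_ms=50):
    """For each time bin, the proportion of trials whose gaze is on the
    target during that bin (a standard visual-world summary)."""
    edges = np.arange(0, t_max, bin_ms)
    on = np.zeros((n_trials, len(edges)), dtype=bool)
    for trial, start, end, on_tgt in fixations:
        if on_tgt:
            # Mark every bin [e, e + bin_ms) that overlaps [start, end).
            mask = (edges + bin_ms > start) & (edges < end)
            on[trial] |= mask
    return edges, on.mean(axis=0)

edges, prop = target_time_course(fixations, n_trials=2)
print(list(zip(edges[:6], prop[:6])))

Curves computed this way for each salience condition can then be compared over time, which is how time-course effects like those reported in the abstract are typically visualized.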
Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[{"id":57483099,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/57483099/thumbnails/1.jpg","file_name":"pdf.pdf","download_url":"https://www.academia.edu/attachments/57483099/download_file","bulk_download_file_name":"The_effect_of_linguistic_and_visual_sali.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/57483099/pdf-libre.pdf?1538425398=\u0026response-content-disposition=attachment%3B+filename%3DThe_effect_of_linguistic_and_visual_sali.pdf\u0026Expires=1740881336\u0026Signature=S96tG92hQaiApjcbJ99qCa2exb4yS-mSxoAuisfkUq05S4Zg55IGArOrTsjDD6oxDERf3bm0sG-Ev8Wj3rFPrj9G6JjlF10SJxix1MtcAJFstQT1-5ua8ZFeem8XkC9xnZH8Kcgej1LpifAC0Hgq7WSHEUfH~5DFSFEspavne7jYTIVMlDcwmiS7OleBQs77M-aNF2Ioacqw3R~sZiL1--Lchc2M9VvOgAWRWqUOujKJqcs5D7Jbp1ClBNVmOhxt3QY6Nx65d47LTJ9dLmIpZet8UWDgbQ1MgRjWNdI9BCIVEZDGyM4E-cNNkjIrPvj~4J0RbqmTiMLUVLiUTKYhag__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="27284332"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/27284332/PET_An_eye_tracking_dataset_for_animal_centric_Pascal_object_classes"><img alt="Research paper thumbnail of PET: An eye-tracking dataset for animal-centric Pascal object classes" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/27284332/PET_An_eye_tracking_dataset_for_animal_centric_Pascal_object_classes">PET: An eye-tracking dataset for animal-centric Pascal object classes</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/RamanathanSubramanian">Ramanathan Subramanian</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://nus.academia.edu/StefanWinkler">Stefan Winkler</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://unitn.academia.edu/DavidMelcher">David Melcher</a></span></div><div class="wp-workCard_item"><span>2015 IEEE International Conference on Multimedia and Expo (ICME)</span><span>, 2015</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="27284332"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="27284332"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 27284332; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + 
Shapes, surfaces and saccades
Vision Research, 1999

Saccadic localization of spatially extended objects requires the computation of a single saccadic landing position. What representation of the target guides saccades? Saccades were examined for various targets composed of dots to determine whether landing position corresponded to the center-of-gravity (average location) of the dots, the center-of-area of the shape, or the symmetric axis. Targets were composed of dots configured as outline drawings of circles, ellipses, cardioids, wiggly lines, or amorphous blobs. In some cases dot spacing was varied, extraneous dot clusters were superimposed, or different distributions of dots inside the boundary were added. Quasi-random dot clusters without a well-defined contour were also studied. Instructions were to look at the target as a whole and to keep latency long enough to avoid compromising accuracy. Saccades landed with a high level of precision (S.D.s 7-10% of target eccentricity) near the center-of-area of the target shape, rather than...
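The distinction between the two candidate landing positions is easy to make concrete: the center-of-gravity is simply the mean of the dot coordinates (and so shifts with dot spacing), while the center-of-area is the centroid of the region enclosed by the outline, computable with the shoelace formula when the dots are ordered along the contour. A minimal sketch of both computations, assuming the dots trace a closed polygon in order; the function names and the example shape are ours, not the paper's:

```python
import numpy as np

def center_of_gravity(dots: np.ndarray) -> np.ndarray:
    """Mean location of the dots themselves; sensitive to dot spacing."""
    return dots.mean(axis=0)

def center_of_area(dots: np.ndarray) -> np.ndarray:
    """Centroid of the polygon enclosed by the ordered outline dots
    (shoelace formula); insensitive to how densely the contour is dotted."""
    x, y = dots[:, 0], dots[:, 1]
    xn, yn = np.roll(x, -1), np.roll(y, -1)
    cross = x * yn - xn * y
    area = cross.sum() / 2.0
    cx = ((x + xn) * cross).sum() / (6.0 * area)
    cy = ((y + yn) * cross).sum() / (6.0 * area)
    return np.array([cx, cy])

# An ellipse outline sampled much more densely on its lower half:
t = np.concatenate([np.linspace(0, np.pi, 10), np.linspace(np.pi, 2 * np.pi, 40)])
dots = np.c_[2 * np.cos(t), np.sin(t)]
print(center_of_gravity(dots))  # pulled toward the densely dotted half
print(center_of_area(dots))     # stays near the true center (0, 0)
```

On this reading, the paper's finding that saccades land near the center-of-area implies that the second quantity, not the first, predicts landing position when dot spacing is manipulated.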
Influence of bottom-up saliency level on performance in a visual WM task

Proportion of correct responses for trials in which the bottom-up visual saliency of one item in the display was manipulated at different levels of visual contrast (and also size, in the case of the triangles). The number of items (set size) was held constant at three. Performance for trials in which the test stimulus was the salient item is distinguished from performance for trials in which the test stimulus was a non-salient item (denoted "other" in the figure), to assess the influence of saliency level on behavioural performance. Error bars show one standard error of the mean. Adapted with permission from Melcher and Piazza [19] (http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719).
task","internal_url":"https://www.academia.edu/100090952/Influence_of_bottom_up_saliency_level_on_performance_in_a_visual_WM_task","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100090951"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100090951/Performance_dependence_on_stimulation_amplitude_at_baseline_level"><img alt="Research paper thumbnail of Performance dependence on stimulation amplitude at baseline level" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090951/Performance_dependence_on_stimulation_amplitude_at_baseline_level">Performance dependence on stimulation amplitude at baseline level</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;The stimulation amplitude at baseline level is a limiting factor of &amp;lt;i&amp;gt;e&amp;lt;/i&amp;gt;...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;The stimulation amplitude at baseline level is a limiting factor of &amp;lt;i&amp;gt;e&amp;lt;/i&amp;gt;WMC. Not only a sufficiently large value is necessary to reach the full WM capacity of the network but also the absolute difference between and has different effects depending on the baseline level . &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; Performance predicted by the model in a change detection task akin to the experiments in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;[19]&amp;lt;/a&amp;gt; as a function of the saliency level. In the model, visual saliency is introduced by means of an upregulation of the neuronal responses to the item. To this end, different levels of additional external stimulation are considered:  = 60, 70, 80, 100, 120 and 140 Hz for set size 3. &amp;lt;b&amp;gt;B&amp;lt;/b&amp;gt; Performance predicted by the model in a change detection task akin to the experiments in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;[19]&amp;lt;/a&amp;gt; as a function of set size. 
The pool selective to the salient stimulus receives an external stimulation  = 100 Hz whereas the pools that are not selective to the salient stimulus but are stimulated receive an external stimulation  = 60 Hz during the 200 ms stimulation period. This corresponds, in fact, to a particular working point of the graph in &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; for a varying set size. In this case, the salient item is always maintained in working memory throughout the delay period but random performance is obtained for non-salient items, thus suggesting a winner salient item in a winner-take-all network. In both plots, the results are assessed separately for the trials in which the target item corresponds to the salient item and those which do not coincide with the salient item (i.e. other). The network parameters employed in these simulations are those shown in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Table 1&amp;lt;/a&amp;gt;.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090951"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090951"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090951; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090951]").text(description); $(".js-view-count[data-work-id=100090951]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090951; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090951']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090951]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090951,"title":"Performance 
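The winner-take-all behaviour described here can be illustrated with a toy model far smaller than the full spiking network: a few threshold-linear rate pools competing through mutual inhibition, with the salient pool receiving a stronger transient drive. This is a sketch under our own simplified dynamics and illustrative parameters, not the paper's model:

```python
import numpy as np

def simulate(ext, t_stim=0.2, t_total=1.0, dt=1e-3,
             w_self=1.5, w_cross=1.2, r_max=40.0, tau=0.05):
    """Threshold-linear rate pools with mutual (cross) inhibition.
    w_self > 1 gives each pool a self-sustaining high state; strong
    w_cross makes coactive pools compete (winner-take-all)."""
    ext = np.asarray(ext, dtype=float)
    r = np.zeros_like(ext)
    trace = np.zeros((int(t_total / dt), len(ext)))
    for k in range(len(trace)):
        inp = ext if k * dt < t_stim else 0.0
        drive = w_self * r - w_cross * (r.sum() - r) + inp
        r = r + dt / tau * (-r + np.clip(drive, 0.0, r_max))
        trace[k] = r
    return trace

# Saliency condition: pool 0 gets the stronger transient drive (cf. the
# 100 Hz vs 60 Hz external stimulation above; these numbers are arbitrary).
trace = simulate(ext=[30.0, 20.0, 20.0])
print(trace[-1].round(1))  # typically only the salient pool remains active
```

With cross-inhibition this strong, the pool that is ahead at stimulus offset suppresses the others and persists alone, which is the single-salient-winner regime the caption describes.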
Firing rates predicted by the model for smaller network sizes

Results from simulations of the full spiking model with 2500 neurons, in four different trials in the baseline condition with no visual saliency and with four pools (pool 1, pool 3, pool 5 and pool 7) receiving an external stimulation of 60 Hz. The results illustrate how smaller networks exhibit larger finite-size noise, which may lead to spontaneous memory losses throughout the delay period and thus reduce the eWMC of the system.
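The finite-size effect invoked here can be isolated from the network dynamics: for a pool of N roughly independent Poisson neurons, the trial-to-trial standard deviation of the population-averaged rate shrinks like 1/sqrt(N), so smaller pools fluctuate more and are more likely to dip below a maintenance threshold during the delay. A minimal illustration (independent Poisson spiking only, not the recurrent model; the rate and window values are arbitrary):

```python
import numpy as np

rng = np.random.default_rng(0)
rate, window = 25.0, 0.3   # pool firing rate (Hz) and a 300 ms read-out window

for n_neurons in (100, 400, 2500):
    # Spike counts per neuron in the window, over 1000 independent trials.
    counts = rng.poisson(rate * window, size=(1000, n_neurons))
    pop_rate = counts.mean(axis=1) / window        # per-trial population rate
    print(f"N={n_neurons:5d}: mean = {pop_rate.mean():5.1f} Hz, "
          f"s.d. = {pop_rate.std():.2f} Hz")       # s.d. shrinks ~ 1/sqrt(N)
```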
href="https://www.academia.edu/100090949/Model_prediction_of_saliency_influence_on_proportion_correct_in_a_visual_WM_task"><img alt="Research paper thumbnail of Model prediction of saliency influence on proportion correct in a visual WM task" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090949/Model_prediction_of_saliency_influence_on_proportion_correct_in_a_visual_WM_task">Model prediction of saliency influence on proportion correct in a visual WM task</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;&amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; Performance predicted by the model in a change detection task akin ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;&amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; Performance predicted by the model in a change detection task akin to the experiments in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;[19]&amp;lt;/a&amp;gt; (also see &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g011&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g011&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Fig. 11&amp;lt;/a&amp;gt;) as a function of set size. The pool selective to the salient stimulus receives an external stimulation  = 120 Hz whereas the pools that are not selective to the salient stimulus but are stimulated receive an external stimulation  = 80 Hz during the 200 ms stimulation period. &amp;lt;b&amp;gt;B&amp;lt;/b&amp;gt; Performance predicted by the model in a change detection task akin to the experiments in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;[19]&amp;lt;/a&amp;gt; (also see &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g012&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g012&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Fig. 12&amp;lt;/a&amp;gt;) as a function of the saliency level. In the model, visual saliency is introduced by means of an upregulation of the neuronal responses to the item. To this end, different levels of additional external stimulation are considered:  = 80, 90, 100, 120, 140 and 160 Hz for set size 3. In both cases, the results are assessed separately for the trials in which the target item corresponds to the salient item and those which do not coincide with the salient item (i.e. other). 
The network parameters employed in these simulations are those shown in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Table 1&amp;lt;/a&amp;gt;. The results suggest a good qualitative conformance with the experimental results shown in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g011&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g011&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Fig. 11&amp;lt;/a&amp;gt; and &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g012&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-g012&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Fig. 12&amp;lt;/a&amp;gt;. It is worth noting that no specific tuning of the network parameters has been sought to reproduce such results. The same parameters used in the previous studies, which reproduce the main results from the available literature, have also been used in this case.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090949"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090949"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090949; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090949]").text(description); $(".js-view-count[data-work-id=100090949]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090949; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090949']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } 
Architecture of the cortical network model

The population of excitatory neurons is subdivided into non-overlapping populations selective to 8 different stimuli. Black and blue arrows within pyramidal cells: NMDA- and AMPA-mediated recurrent excitatory connections. Black arrows from other areas: AMPA-mediated external excitatory connections. Red circle-headed arrows: GABA-mediated inhibitory connections. There are three possible synaptic strengths for recurrent excitatory connections: potentiated by a relative factor (black arrows), depressed by a relative factor (light blue arrows), and unchanged at the baseline level (dark blue arrows). A single weight denotes the strength of the inhibitory-to-excitatory and inhibitory-to-inhibitory connections. The dots stand for the remaining selective populations and their corresponding connections.
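The described connectivity reduces to a block-structured weight matrix over pools: potentiated recurrent weights within each selective pool, depressed weights between different selective pools, baseline weights elsewhere, and a uniform inhibitory weight. A sketch of that structure; the names w_plus, w_minus, w_base and w_inh are our labels for the factors the caption refers to, and the values are illustrative:

```python
import numpy as np

N_SEL = 8                                   # selective excitatory pools
w_plus, w_minus, w_base = 2.1, 0.8, 1.0     # illustrative relative factors
w_inh = 1.0                                 # inhibitory connection strength

# Pools 0..7 are selective excitatory, pool 8 is nonselective excitatory,
# pool 9 is inhibitory.  W[i, j] is the weight of the connection j -> i.
n = N_SEL + 2
W = np.full((n, n), w_base)
for i in range(N_SEL):
    for j in range(N_SEL):
        W[i, j] = w_plus if i == j else w_minus   # potentiated / depressed

W[:, n - 1] = -w_inh   # inhibitory pool projects to every pool (incl. itself)
print(W.round(1))
```

The within-pool potentiation is what lets a stimulated pool sustain its own activity after stimulus offset, while the shared inhibitory weight couples all pools into a single competition.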
src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090947/Model_based_prediction_of_performance_for_different_levels_of_saliency">Model-based prediction of performance for different levels of saliency</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Model-based prediction of performance derived from computational simulations of a change...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;Model-based prediction of performance derived from computational simulations of a change detection task with selective neural assemblies ( axis) simultaneously stimulated. Performance is calculated by assuming that an item is held in visual WM when its associated selective pool shows a mean persistent activity 20 Hz during the last 300 ms of the delay period. selective pools are stimulated at  = 60 Hz and the remaining pool receives a higher stimulation  = 60 Hz (no saliency), 70 Hz, 80 Hz, 100 Hz, 150 Hz, and 200 Hz. &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; Performance calculated as (&amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719.e303&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719.e303&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Eq. 7&amp;lt;/a&amp;gt;), and &amp;lt;b&amp;gt;B&amp;lt;/b&amp;gt; performance calculated as (&amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719.e305&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719.e305&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Eq. 8&amp;lt;/a&amp;gt;). 
For both proposed performance estimates, performance decreases for larger set sizes and for larger saliency levels.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090947"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090947"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090947; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090947]").text(description); $(".js-view-count[data-work-id=100090947]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090947; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090947']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090947]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090947,"title":"Model-based prediction of performance for different levels of saliency","internal_url":"https://www.academia.edu/100090947/Model_based_prediction_of_performance_for_different_levels_of_saliency","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100090946"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100090946/Model_predictions_of_firing_rates"><img alt="Research paper thumbnail of Model predictions of firing rates" class="work-thumbnail" 
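The maintenance criterion is straightforward to state as code: average each pool's rate over the last 300 ms of the delay period and count the item as held if that average exceeds 20 Hz. A minimal read-out sketch; the (time, pools) array layout is an assumption:

```python
import numpy as np

def items_in_memory(rates: np.ndarray, dt: float,
                    window: float = 0.3, threshold: float = 20.0) -> np.ndarray:
    """rates: (time_steps, n_pools) firing-rate traces for one trial.
    A pool counts as holding its item if its mean rate over the last
    `window` seconds of the delay period exceeds `threshold` Hz."""
    n_last = int(window / dt)
    return rates[-n_last:].mean(axis=0) > threshold

# e.g., applied to the toy traces from the competition sketch above:
# held = items_in_memory(trace, dt=1e-3)
# ewmc = held.sum()   # number of items effectively maintained (eWMC)
```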
src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090946/Model_predictions_of_firing_rates">Model predictions of firing rates</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Illustration of three possible neuronal behaviours observed when 3 pools are simultaneou...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;Illustration of three possible neuronal behaviours observed when 3 pools are simultaneously stimulated. The stimulation period is represented by means of a thick back line. Noise introduces a stochastic component which is translated into a distribution of possible neuronal behaviours. &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; Trials in which no saliency effects are present and the three pools selective to the visual items in the display (pool 1, pool 3 and pool 5) receive an intensity  = 80 Hz, and &amp;lt;b&amp;gt;B&amp;lt;/b&amp;gt; trials in which one of the three items in the visual display is salient and the pool selective to such item (pool 1) receives an external current  = 120 Hz instead of  = 80 Hz. The response of the pool selective to the salient stimulus always reaches an elevated firing rate which, in turn, recruits inhibition and may prevent other pools from reaching such elevated firing rates, and thus, reduces the &amp;lt;i&amp;gt;e&amp;lt;/i&amp;gt;WMC.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090946"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090946"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090946; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090946]").text(description); $(".js-view-count[data-work-id=100090946]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090946; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090946']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090946]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090946,"title":"Model predictions of firing rates","internal_url":"https://www.academia.edu/100090946/Model_predictions_of_firing_rates","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100090945"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100090945/Prediction_of_firing_rates"><img alt="Research paper thumbnail of Prediction of firing rates" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090945/Prediction_of_firing_rates">Prediction of firing rates</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Results obtained from the network model with the parameters shown in &amp;lt;a href=&amp;quot;ht...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;Results obtained from the network model with the parameters shown in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Table 1&amp;lt;/a&amp;gt;. The stimulation period during which the external stimulation is administered to the network to emulate the presence of 4 items in the visual display is  = 500 ms and is depicted by a black segment. &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; 4 pools (pool 1, pool 3, pool 5 and pool 7) receive an external stimulation  = 60 Hz and &amp;lt;b&amp;gt;B&amp;lt;/b&amp;gt; the pool selective to the salient item (pool 1) receives an external stimulation  = 100 Hz while the remaining three stimulated pools (pool 3, pool 5 and pool 7) receive only  = 60 Hz. 
As a consequence of the biased competition in the visual saliency condition, the pool selective to the salient item quickly reaches a state of high firing rate during the stimulation period while preventing others from accessing such a state. This leads to fewer items being appropriately encoded into the visual WM system and, thus, would reduce performance in those trials in which the test item is different from the salient one.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090945"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090945"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090945; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090945]").text(description); $(".js-view-count[data-work-id=100090945]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090945; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090945']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090945]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090945,"title":"Prediction of firing rates","internal_url":"https://www.academia.edu/100090945/Prediction_of_firing_rates","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100090944"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" 
data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100090944/Distribution_of_number_of_items_maintained_in_visual_WM_as_a_function_of_the_stimulation_period"><img alt="Research paper thumbnail of Distribution of number of items maintained in visual WM as a function of the stimulation period" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090944/Distribution_of_number_of_items_maintained_in_visual_WM_as_a_function_of_the_stimulation_period">Distribution of number of items maintained in visual WM as a function of the stimulation period</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Histograms illustrating the percentage of trials in which multiple items show high activ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;Histograms illustrating the percentage of trials in which multiple items show high activity during the last 300 ms of the delay period for different stimulation periods: &amp;lt;b&amp;gt;A &amp;lt;/b&amp;gt; = 200 ms, &amp;lt;b&amp;gt;B &amp;lt;/b&amp;gt; = 500 ms, and &amp;lt;b&amp;gt;C &amp;lt;/b&amp;gt; = 1000 ms. The set size of the memory set is 4 in these experiments. Four pools receive an external stimulation  = 60 Hz in the no-saliency condition whereas the pool selective to the salient item receives an external stimulation  = 100 Hz (while the remaining three stimulated pools receive only  = 60 Hz) in the saliency condition. The network parameters employed in these simulations are those shown in &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;Table 1&amp;lt;/a&amp;gt;. 
The results illustrate that &amp;lt;i&amp;gt;e&amp;lt;/i&amp;gt;WMC increases when the stimulation period is increased and is reduced in the presence of saliency.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090944"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090944"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090944; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090944]").text(description); $(".js-view-count[data-work-id=100090944]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090944; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090944']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090944]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090944,"title":"Distribution of number of items maintained in visual WM as a function of the stimulation period","internal_url":"https://www.academia.edu/100090944/Distribution_of_number_of_items_maintained_in_visual_WM_as_a_function_of_the_stimulation_period","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100090943"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" 
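The counting procedure in this caption — how many pools show high activity during the last 300 ms of the delay — translates directly into an estimator. A sketch using the same illustrative rate model as above (thresholds, weights, and trial counts are assumptions, not the paper's values):

```python
import numpy as np

rng = np.random.default_rng(1)

# Count, per noisy trial, how many pools hold a high firing rate during the
# last 300 ms of the delay period, then histogram the counts over trials.
# Same illustrative rate model as in the previous sketch; none of the
# constants are taken from the paper.
N, DT, TAU, R_MAX = 8, 1e-3, 0.02, 50.0
W_SELF, W_INH = 1.1, 0.15
DELAY, WINDOW, HIGH = 1.0, 0.3, 25.0         # delay (s), counting window (s), Hz

def f(x):
    return R_MAX / (1.0 + np.exp(-(x - 20.0) / 5.0))

def encoded_items(ext_hz, t_stim):
    r, tail = np.full(N, 3.0), []
    for k in range(int((t_stim + DELAY) / DT)):
        t = k * DT
        ext = ext_hz if t < t_stim else 0.0
        drive = (W_SELF * r - W_INH * (r.sum() - r)
                 + 0.25 * ext + rng.normal(0.0, 2.0, N))
        r += DT / TAU * (-r + f(drive))
        if t >= t_stim + DELAY - WINDOW:
            tail.append(r.copy())                # rates in the last 300 ms
    return int((np.mean(tail, axis=0) > HIGH).sum())

ext = np.zeros(N); ext[[0, 2, 4, 6]] = 60.0      # set size 4, no-saliency condition
for t_stim in (0.2, 0.5, 1.0):                   # the three panels: 200/500/1000 ms
    counts = np.bincount([encoded_items(ext, t_stim) for _ in range(200)],
                         minlength=N + 1)
    print(f"t_stim={int(t_stim * 1000)} ms  P(n items) =", counts / counts.sum())
```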
href="https://www.academia.edu/100090943/RESEARCH_ARTICLE_The_Influence_of_Art_Expertise_and_Training_on_Emotion_and_Preference_Ratings_for_Representational_and_Abstract_Artworks"><img alt="Research paper thumbnail of RESEARCH ARTICLE The Influence of Art Expertise and Training on Emotion and Preference Ratings for Representational and Abstract Artworks" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090943/RESEARCH_ARTICLE_The_Influence_of_Art_Expertise_and_Training_on_Emotion_and_Preference_Ratings_for_Representational_and_Abstract_Artworks">RESEARCH ARTICLE The Influence of Art Expertise and Training on Emotion and Preference Ratings for Representational and Abstract Artworks</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Across cultures and throughout recorded history, humans have produced visual art. This raises the...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Across cultures and throughout recorded history, humans have produced visual art. This raises the question of why people report such an emotional response to artworks and find some works more beautiful or compelling than others. In the current study we investigated the interplay between art expertise, and emotional and preference judgments. Sixty partici-pants (40 novices, 20 art experts) rated a set of 150 abstract artworks and portraits during two occasions: in a laboratory setting and in a museum. Before commencing their second session, half of the art novices received a brief training on stylistic and art historical aspects of abstract art and portraiture. Results showed that art experts rated the artworks higher than novices on aesthetic facets (beauty and wanting), but no group differences were observed on affective evaluations (valence and arousal). The training session made a small effect on ratings of preference compared to the non-trained group of novices. 
Overall, these f...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090943"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090943"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090943; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090943]").text(description); $(".js-view-count[data-work-id=100090943]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090943; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090943']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090943]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090943,"title":"RESEARCH ARTICLE The Influence of Art Expertise and Training on Emotion and Preference Ratings for Representational and Abstract Artworks","internal_url":"https://www.academia.edu/100090943/RESEARCH_ARTICLE_The_Influence_of_Art_Expertise_and_Training_on_Emotion_and_Preference_Ratings_for_Representational_and_Abstract_Artworks","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100090942"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100090942/POSTER_PRESENTATION_Open_Access"><img alt="Research paper thumbnail of POSTER PRESENTATION Open Access" 
class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090942/POSTER_PRESENTATION_Open_Access">POSTER PRESENTATION Open Access</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">A computational study of visual working memory capacity in the presence of saliency effects</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090942"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090942"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090942; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090942]").text(description); $(".js-view-count[data-work-id=100090942]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090942; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090942']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090942]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090942,"title":"POSTER PRESENTATION Open Access","internal_url":"https://www.academia.edu/100090942/POSTER_PRESENTATION_Open_Access","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div 
class="js-work-strip profile--work_container" data-work-id="100090941"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100090941/Correction_Effective_Visual_Working_Memory_Capacity_An_Emergent_Effect_from_the_Neural_Dynamics_in_an_Attractor_Network"><img alt="Research paper thumbnail of Correction: Effective Visual Working Memory Capacity: An Emergent Effect from the Neural Dynamics in an Attractor Network" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100090941/Correction_Effective_Visual_Working_Memory_Capacity_An_Emergent_Effect_from_the_Neural_Dynamics_in_an_Attractor_Network">Correction: Effective Visual Working Memory Capacity: An Emergent Effect from the Neural Dynamics in an Attractor Network</a></div><div class="wp-workCard_item"><span>PLoS ONE</span><span>, 2012</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100090941"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100090941"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100090941; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100090941]").text(description); $(".js-view-count[data-work-id=100090941]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100090941; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100090941']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100090941]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100090941,"title":"Correction: 
Effective Visual Working Memory Capacity: An Emergent Effect from the Neural Dynamics in an Attractor Network","internal_url":"https://www.academia.edu/100090941/Correction_Effective_Visual_Working_Memory_Capacity_An_Emergent_Effect_from_the_Neural_Dynamics_in_an_Attractor_Network","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100083987"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100083987/The_effect_of_perceptual_history_on_the_interpretation_of_causality"><img alt="Research paper thumbnail of The effect of perceptual history on the interpretation of causality" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100083987/The_effect_of_perceptual_history_on_the_interpretation_of_causality">The effect of perceptual history on the interpretation of causality</a></div><div class="wp-workCard_item"><span>Journal of Vision</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100083987"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100083987"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100083987; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100083987]").text(description); $(".js-view-count[data-work-id=100083987]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100083987; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100083987']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100083987]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100083987,"title":"The effect of perceptual history on the interpretation of causality","internal_url":"https://www.academia.edu/100083987/The_effect_of_perceptual_history_on_the_interpretation_of_causality","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100083986"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100083986/Mean_field_analysis_of_the_model"><img alt="Research paper thumbnail of Mean field analysis of the model" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100083986/Mean_field_analysis_of_the_model">Mean field analysis of the model</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Mean field analysis of the model assessing the dependence of the network behaviour on th...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;Mean field analysis of the model assessing the dependence of the network behaviour on the potentiated synaptic strength () and the inhibitory synaptic strength (), for different initial conditions. &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; The initial firing rate conditions for pools showing high firing rates are derived from a Gaussian distribution with mean  = 40 Hz and standard deviation  = 0.01 Hz. The firing rates determining the initial conditions of pools in spontaneous states are obtained from randomly sampling a Gaussian distribution with mean  = 3 Hz and standard deviation  = 0.01 Hz. The colour code indicates the number of pools which settle on stable states showing persistently high firing rates ( Hz) during the delay period when no further stimulation is provided. 
&amp;lt;b&amp;gt;B&amp;lt;/b&amp;gt; Identical initial conditions as in &amp;lt;b&amp;gt;A&amp;lt;/b&amp;gt; but one of the pools showing an initially high firing rate of 65 Hz. From left to right an increasing number of pools had high initial firing rates. Note that as a consequence of considering a hard boundary (i.e. Hz, used in subsequent studies) for values some apparent discontinuities may appear for increasing values, which in fact correspond to stable states with persistent firing rates just below the threshold. However, this does not occur in the region where our working point is located (, ).&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100083986"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100083986"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100083986; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100083986]").text(description); $(".js-view-count[data-work-id=100083986]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100083986; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100083986']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100083986]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100083986,"title":"Mean field analysis of the model","internal_url":"https://www.academia.edu/100083986/Mean_field_analysis_of_the_model","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); 
</script> <div class="js-work-strip profile--work_container" data-work-id="100083984"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100083984/Effects_of_familiarity_on_rating_scores_obtained_in_the_museum_session"><img alt="Research paper thumbnail of Effects of familiarity on rating scores obtained in the museum session" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100083984/Effects_of_familiarity_on_rating_scores_obtained_in_the_museum_session">Effects of familiarity on rating scores obtained in the museum session</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Error bars represent the standard error of the mean. * significantly different from each...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;Error bars represent the standard error of the mean. * significantly different from each other at &amp;lt;i&amp;gt;p&amp;lt;/i&amp;gt;&amp;lt;.05.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100083984"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100083984"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100083984; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100083984]").text(description); $(".js-view-count[data-work-id=100083984]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100083984; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100083984']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var 
Influence of saliency on behavioural performance in a visual WM task
https://www.academia.edu/100083983/Influence_of_saliency_on_behavioural_performace_in_a_visual_WM_task

Proportion of correct responses for trials in which: A: the bottom-up saliency of one of the items in the display was defined by manipulating the visual contrast, and B: the top-down saliency of one of the items in the display resulted from an item appearing at a task-relevant location. Performance results for trials in which the test stimulus is the salient item are distinguished from those in which the test stimulus is a non-salient item (denoted "other" in the figure) to assess the influence of saliency on behavioural performance. Error bars show one standard error of the mean. Adapted with permission from Melcher and Piazza [19] (http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2).
Distribution of number of items maintained in WM as a function of network size
https://www.academia.edu/100083982/Distribution_of_number_of_items_maintained_in_WM_as_a_function_of_network_size

Histograms illustrating the percentage of trials in which multiple items show high activity during the last 300 ms of the delay period, for different stimulation periods (200 ms, 500 ms and 1000 ms) and different network sizes (2500, 5000 and 10000 neurons). A: four pools receive an additional external stimulation of 60 Hz. B: the pool selective to the salient item receives an external stimulation of 100 Hz while the remaining three stimulated pools receive only 60 Hz. The network parameters employed in these simulations are those shown in Table 1 (http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone-0042719-t001). The results illustrate that eWMC increases when the stimulation period is increased for intermediate (500 ms) and long (1000 ms) stimulation periods, and is reduced in the presence of saliency. This tendency is stronger for larger network sizes as a consequence of the reduced finite-size noise. For short stimulation periods (200 ms), the competition process in the saliency condition favours winner-take-all behaviour for the salient item.
setup: task timeline" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100083981/Experimental_setup_task_timeline">Experimental setup: task timeline</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;A stimulus set (“memory set”), consisting of 1 to 4 oriented Gabor stimuli, was shown fo...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;A stimulus set (“memory set”), consisting of 1 to 4 oriented Gabor stimuli, was shown for 200 ms in order to discourage subjects from making saccadic eye movements to scan the individual items. Trials were started by a button press and the first stimulus frame was only displayed after a variable delay time (500–700 ms). A fixation point was maintained at the center of the screen throughout each block of trials. The orientation of each Gabor stimulus in the memory set was one of eight possible orientations (10, 20, 30 or 40 degrees from the vertical). A blank delay of 1000 ms followed the display of the memory set. Then, one probe stimulus (“test stimulus”) was shown for 200 ms. The test stimulus was identical to the Gabor patch at the same location in the memory set on “same trials”, whereas its orientation was mirror-reversed across the vertical on “different trials”. In the baseline condition, the Gabor stimuli had identical contrast and size (30% of full contrast). Separate blocks of trials were run in which the saliency of one item was manipulated by either increasing its bottom-up or top-down saliency. In the bottom-up saliency manipulation, the visual contrast with the background and/or the size of the Gabor stimulus was increased. Top-down saliency was manipulated by adding a memory-guided saccade task. To this end, a red dot was presented, along with the fixation point, at the beginning of the trials and participants were instructed to memorise this location in order to make a saccade there once the central fixation point was removed. 
Adapted with permission from Melcher and Piazza &amp;lt;a href=&amp;quot;<a href="http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot" rel="nofollow">http://www.plosone.org/article/info:doi/10.1371/journal.pone.0042719#pone.0042719-Melcher2&amp;quot</a>; target=&amp;quot;_blank&amp;quot;&amp;gt;[19]&amp;lt;/a&amp;gt;.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100083981"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100083981"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100083981; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100083981]").text(description); $(".js-view-count[data-work-id=100083981]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100083981; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100083981']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100083981]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100083981,"title":"Experimental setup: task timeline","internal_url":"https://www.academia.edu/100083981/Experimental_setup_task_timeline","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100083980"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" 
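For concreteness, the event schedule in this caption can be written out as a trial generator. The durations come from the caption; the names, the dictionary layout, and the reading of the eight orientations as the ± pairs of the four listed magnitudes are assumptions:

```python
import random

random.seed(0)

# Sketch of one trial's event schedule as described in the caption.
# Durations are from the text; structure and names are assumptions.
ORIENTS = [s * m for m in (10, 20, 30, 40) for s in (+1, -1)]  # deg from vertical

def make_trial(set_size=4):
    memory_set = random.sample(ORIENTS, set_size)
    probe_idx = random.randrange(set_size)
    same = random.random() < 0.5
    # On "different" trials the probe is mirror-reversed across the vertical,
    # i.e. its orientation is negated.
    probe = memory_set[probe_idx] if same else -memory_set[probe_idx]
    return {
        "events": [("fixation", random.uniform(0.5, 0.7)),  # variable 500-700 ms
                   ("memory_set", 0.2),                     # 200 ms
                   ("blank_delay", 1.0),                    # 1000 ms
                   ("test", 0.2)],                          # 200 ms
        "memory_set": memory_set,
        "probe": (probe_idx, probe),
        "correct_answer": "same" if same else "different",
    }

print(make_trial())
```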
href="https://www.academia.edu/100083980/Demographic_characteristics_of_participants"><img alt="Research paper thumbnail of Demographic characteristics of participants" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100083980/Demographic_characteristics_of_participants">Demographic characteristics of participants</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;The art expertise score refers to a questionnaire adapted from the Assessment of Art Att...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">&amp;lt;p&amp;gt;The art expertise score refers to a questionnaire adapted from the Assessment of Art Attributes (see Measures section) in which participants indicated, among others, the number of hours per week spent on creating visual art, number of museum visits per year, and so on. Scores are presented as mean ± standard deviation.&amp;lt;/p&amp;gt;&amp;lt;p&amp;gt;* significantly lower compared to the Expert group at &amp;lt;i&amp;gt;p&amp;lt;/i&amp;gt;&amp;lt;.005.&amp;lt;/p&amp;gt;&amp;lt;p&amp;gt;Demographic characteristics of participants.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100083980"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100083980"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100083980; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100083980]").text(description); $(".js-view-count[data-work-id=100083980]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100083980; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='100083980']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=100083980]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":100083980,"title":"Demographic characteristics of participants","internal_url":"https://www.academia.edu/100083980/Demographic_characteristics_of_participants","owner_id":7323984,"coauthors_can_edit":true,"owner":{"id":7323984,"first_name":"David","middle_initials":null,"last_name":"Melcher","page_name":"DavidMelcher","domain_name":"unitn","created_at":"2013-12-03T17:47:15.105-08:00","display_name":"David Melcher","url":"https://unitn.academia.edu/DavidMelcher"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="100083979"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/100083979/Parameters_of_the_integrare_and_fire_simulations"><img alt="Research paper thumbnail of Parameters of the integrare-and-fire simulations" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/100083979/Parameters_of_the_integrare_and_fire_simulations">Parameters of the integrare-and-fire simulations</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">&amp;lt;p&amp;gt;Parameters of the integrare-and-fire simulations.&amp;lt;/p</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="100083979"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="100083979"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 100083979; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=100083979]").text(description); $(".js-view-count[data-work-id=100083979]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 100083979; window.Academia.workPercentilesFetcher.queue(workId, function 
