CINXE.COM

Zvi N Roth | The Hebrew University of Jerusalem - Academia.edu

<!DOCTYPE html> <html lang="en" xmlns:fb="http://www.facebook.com/2008/fbml" class="wf-loading"> <head prefix="og: https://ogp.me/ns# fb: https://ogp.me/ns/fb# academia: https://ogp.me/ns/fb/academia#"> <meta charset="utf-8"> <meta name=viewport content="width=device-width, initial-scale=1"> <meta rel="search" type="application/opensearchdescription+xml" href="/open_search.xml" title="Academia.edu"> <title>Zvi N Roth | The Hebrew University of Jerusalem - Academia.edu</title> <!-- _ _ _ | | (_) | | __ _ ___ __ _ __| | ___ _ __ ___ _ __ _ ___ __| |_ _ / _` |/ __/ _` |/ _` |/ _ \ '_ ` _ \| |/ _` | / _ \/ _` | | | | | (_| | (_| (_| | (_| | __/ | | | | | | (_| || __/ (_| | |_| | \__,_|\___\__,_|\__,_|\___|_| |_| |_|_|\__,_(_)___|\__,_|\__,_| We're hiring! See https://www.academia.edu/hiring --> <link href="//a.academia-assets.com/images/favicons/favicon-production.ico" rel="shortcut icon" type="image/vnd.microsoft.icon"> <link rel="apple-touch-icon" sizes="57x57" href="//a.academia-assets.com/images/favicons/apple-touch-icon-57x57.png"> <link rel="apple-touch-icon" sizes="60x60" href="//a.academia-assets.com/images/favicons/apple-touch-icon-60x60.png"> <link rel="apple-touch-icon" sizes="72x72" href="//a.academia-assets.com/images/favicons/apple-touch-icon-72x72.png"> <link rel="apple-touch-icon" sizes="76x76" href="//a.academia-assets.com/images/favicons/apple-touch-icon-76x76.png"> <link rel="apple-touch-icon" sizes="114x114" href="//a.academia-assets.com/images/favicons/apple-touch-icon-114x114.png"> <link rel="apple-touch-icon" sizes="120x120" href="//a.academia-assets.com/images/favicons/apple-touch-icon-120x120.png"> <link rel="apple-touch-icon" sizes="144x144" href="//a.academia-assets.com/images/favicons/apple-touch-icon-144x144.png"> <link rel="apple-touch-icon" sizes="152x152" href="//a.academia-assets.com/images/favicons/apple-touch-icon-152x152.png"> <link rel="apple-touch-icon" sizes="180x180" href="//a.academia-assets.com/images/favicons/apple-touch-icon-180x180.png"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-32x32.png" sizes="32x32"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-194x194.png" sizes="194x194"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-96x96.png" sizes="96x96"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/android-chrome-192x192.png" sizes="192x192"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-16x16.png" sizes="16x16"> <link rel="manifest" href="//a.academia-assets.com/images/favicons/manifest.json"> <meta name="msapplication-TileColor" content="#2b5797"> <meta name="msapplication-TileImage" content="//a.academia-assets.com/images/favicons/mstile-144x144.png"> <meta name="theme-color" content="#ffffff"> <script> window.performance && window.performance.measure && window.performance.measure("Time To First Byte", "requestStart", "responseStart"); </script> <script> (function() { if (!window.URLSearchParams || !window.history || !window.history.replaceState) { return; } var searchParams = new URLSearchParams(window.location.search); var paramsToDelete = [ 'fs', 'sm', 'swp', 'iid', 'nbs', 'rcc', // related content category 'rcpos', // related content carousel position 'rcpg', // related carousel page 'rchid', // related content hit id 'f_ri', // research interest id, for SEO tracking 'f_fri', // featured research interest, for SEO tracking (param key without value) 'f_rid', // from research interest directory for SEO tracking 'f_loswp', // from research interest pills on LOSWP sidebar for SEO tracking 'rhid', // referrring hit id ]; if (paramsToDelete.every((key) => searchParams.get(key) === null)) { return; } paramsToDelete.forEach((key) => { searchParams.delete(key); }); var cleanUrl = new URL(window.location.href); cleanUrl.search = searchParams.toString(); history.replaceState({}, document.title, cleanUrl); })(); </script> <script async src="https://www.googletagmanager.com/gtag/js?id=G-5VKX33P2DS"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-5VKX33P2DS', { cookie_domain: 'academia.edu', send_page_view: false, }); gtag('event', 'page_view', { 'controller': "profiles/works", 'action': "summary", 'controller_action': 'profiles/works#summary', 'logged_in': 'false', 'edge': 'unknown', // Send nil if there is no A/B test bucket, in case some records get logged // with missing data - that way we can distinguish between the two cases. // ab_test_bucket should be of the form <ab_test_name>:<bucket> 'ab_test_bucket': null, }) </script> <script type="text/javascript"> window.sendUserTiming = function(timingName) { if (!(window.performance && window.performance.measure)) return; var entries = window.performance.getEntriesByName(timingName, "measure"); if (entries.length !== 1) return; var timingValue = Math.round(entries[0].duration); gtag('event', 'timing_complete', { name: timingName, value: timingValue, event_category: 'User-centric', }); }; window.sendUserTiming("Time To First Byte"); </script> <meta name="csrf-param" content="authenticity_token" /> <meta name="csrf-token" content="PTWcPvqZ91uC3qW/RHuII9C9MY0OJ0e0KsMNoU/qhbnfKUpbHm5lxcnwT4srHT09MKcROmKVJbqgl2O83JeDYQ==" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/wow-77f7b87cb1583fc59aa8f94756ebfe913345937eb932042b4077563bebb5fb4b.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/social/home-1c712297ae3ac71207193b1bae0ecf1aae125886850f62c9c0139dd867630797.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/heading-b2b823dd904da60a48fd1bfa1defd840610c2ff414d3f39ed3af46277ab8df3b.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/button-3cea6e0ad4715ed965c49bfb15dedfc632787b32ff6d8c3a474182b231146ab7.css" /><link crossorigin="" href="https://fonts.gstatic.com/" rel="preconnect" /><link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,opsz,wght@0,9..40,100..1000;1,9..40,100..1000&amp;family=Gupter:wght@400;500;700&amp;family=IBM+Plex+Mono:wght@300;400&amp;family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20,400,0,0&amp;display=swap" rel="stylesheet" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/common-10fa40af19d25203774df2d4a03b9b5771b45109c2304968038e88a81d1215c5.css" /> <meta name="author" content="zvi n roth" /> <meta name="description" content="Zvi N Roth, The Hebrew University of Jerusalem: 6 Followers, 12 Following, 5 Research papers. Research interests: Neuroscience, Computational Neuroscience, and…" /> <meta name="google-site-verification" content="bKJMBZA7E43xhDOopFZkssMMkBRjvYERV-NaN4R6mrs" /> <script> var $controller_name = 'works'; var $action_name = "summary"; var $rails_env = 'production'; var $app_rev = 'c7ac46e400875c3b13c788ad246730fe5f6b36cc'; var $domain = 'academia.edu'; var $app_host = "academia.edu"; var $asset_host = "academia-assets.com"; var $start_time = new Date().getTime(); var $recaptcha_key = "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB"; var $recaptcha_invisible_key = "6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj"; var $disableClientRecordHit = false; </script> <script> window.Aedu = { hit_data: null }; window.Aedu.SiteStats = {"premium_universities_count":15272,"monthly_visitors":"113 million","monthly_visitor_count":113784677,"monthly_visitor_count_in_millions":113,"user_count":277494677,"paper_count":55203019,"paper_count_in_millions":55,"page_count":432000000,"page_count_in_millions":432,"pdf_count":16500000,"pdf_count_in_millions":16}; window.Aedu.serverRenderTime = new Date(1732754270000); window.Aedu.timeDifference = new Date().getTime() - 1732754270000; window.Aedu.isUsingCssV1 = false; window.Aedu.enableLocalization = true; window.Aedu.activateFullstory = false; window.Aedu.serviceAvailability = { status: {"attention_db":"on","bibliography_db":"on","contacts_db":"on","email_db":"on","indexability_db":"on","mentions_db":"on","news_db":"on","notifications_db":"on","offsite_mentions_db":"on","redshift":"on","redshift_exports_db":"on","related_works_db":"on","ring_db":"on","user_tests_db":"on"}, serviceEnabled: function(service) { return this.status[service] === "on"; }, readEnabled: function(service) { return this.serviceEnabled(service) || this.status[service] === "read_only"; }, }; window.Aedu.viewApmTrace = function() { // Check if x-apm-trace-id meta tag is set, and open the trace in APM // in a new window if it is. var apmTraceId = document.head.querySelector('meta[name="x-apm-trace-id"]'); if (apmTraceId) { var traceId = apmTraceId.content; // Use trace ID to construct URL, an example URL looks like: // https://app.datadoghq.com/apm/traces?query=trace_id%31298410148923562634 var apmUrl = 'https://app.datadoghq.com/apm/traces?query=trace_id%3A' + traceId; window.open(apmUrl, '_blank'); } }; </script> <!--[if lt IE 9]> <script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.2/html5shiv.min.js"></script> <![endif]--> <link href="https://fonts.googleapis.com/css?family=Roboto:100,100i,300,300i,400,400i,500,500i,700,700i,900,900i" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css" rel="stylesheet"> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/libraries-a9675dcb01ec4ef6aa807ba772c7a5a00c1820d3ff661c1038a20f80d06bb4e4.css" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/academia-296162c7af6fd81dcdd76f1a94f1fad04fb5f647401337d136fe8b68742170b1.css" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system_legacy-056a9113b9a0f5343d013b29ee1929d5a18be35fdcdceb616600b4db8bd20054.css" /> <script src="//a.academia-assets.com/assets/webpack_bundles/runtime-bundle-005434038af4252ca37c527588411a3d6a0eabb5f727fac83f8bbe7fd88d93bb.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/webpack_libraries_and_infrequently_changed.wjs-bundle-7734a8b54dedb8fefe75ef934203d0ef41d066c1c5cfa6ba2a13dd7ec8bce449.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/core_webpack.wjs-bundle-9eaa790b1139e2d88eb4ac931dcbc6d7cea4403a763db5cc5a695dd2eaa34151.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/sentry.wjs-bundle-5fe03fddca915c8ba0f7edbe64c194308e8ce5abaed7bffe1255ff37549c4808.js"></script> <script> jade = window.jade || {}; jade.helpers = window.$h; jade._ = window._; </script> <!-- Google Tag Manager --> <script id="tag-manager-head-root">(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0], j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src= 'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f); })(window,document,'script','dataLayer_old','GTM-5G9JF7Z');</script> <!-- End Google Tag Manager --> <script> window.gptadslots = []; window.googletag = window.googletag || {}; window.googletag.cmd = window.googletag.cmd || []; </script> <script type="text/javascript"> // TODO(jacob): This should be defined, may be rare load order problem. // Checking if null is just a quick fix, will default to en if unset. // Better fix is to run this immedietely after I18n is set. if (window.I18n != null) { I18n.defaultLocale = "en"; I18n.locale = "en"; I18n.fallbacks = true; } </script> <link rel="canonical" href="https://huji.academia.edu/ZviRoth" /> </head> <!--[if gte IE 9 ]> <body class='ie ie9 c-profiles/works a-summary logged_out'> <![endif]--> <!--[if !(IE) ]><!--> <body class='c-profiles/works a-summary logged_out'> <!--<![endif]--> <div id="fb-root"></div><script>window.fbAsyncInit = function() { FB.init({ appId: "2369844204", version: "v8.0", status: true, cookie: true, xfbml: true }); // Additional initialization code. if (window.InitFacebook) { // facebook.ts already loaded, set it up. window.InitFacebook(); } else { // Set a flag for facebook.ts to find when it loads. window.academiaAuthReadyFacebook = true; } };</script><script>window.fbAsyncLoad = function() { // Protection against double calling of this function if (window.FB) { return; } (function(d, s, id){ var js, fjs = d.getElementsByTagName(s)[0]; if (d.getElementById(id)) {return;} js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_US/sdk.js"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk')); } if (!window.defer_facebook) { // Autoload if not deferred window.fbAsyncLoad(); } else { // Defer loading by 5 seconds setTimeout(function() { window.fbAsyncLoad(); }, 5000); }</script> <div id="google-root"></div><script>window.loadGoogle = function() { if (window.InitGoogle) { // google.ts already loaded, set it up. window.InitGoogle("331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"); } else { // Set a flag for google.ts to use when it loads. window.GoogleClientID = "331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"; } };</script><script>window.googleAsyncLoad = function() { // Protection against double calling of this function (function(d) { var js; var id = 'google-jssdk'; var ref = d.getElementsByTagName('script')[0]; if (d.getElementById(id)) { return; } js = d.createElement('script'); js.id = id; js.async = true; js.onload = loadGoogle; js.src = "https://accounts.google.com/gsi/client" ref.parentNode.insertBefore(js, ref); }(document)); } if (!window.defer_google) { // Autoload if not deferred window.googleAsyncLoad(); } else { // Defer loading by 5 seconds setTimeout(function() { window.googleAsyncLoad(); }, 5000); }</script> <div id="tag-manager-body-root"> <!-- Google Tag Manager (noscript) --> <noscript><iframe src="https://www.googletagmanager.com/ns.html?id=GTM-5G9JF7Z" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript> <!-- End Google Tag Manager (noscript) --> <!-- Event listeners for analytics --> <script> window.addEventListener('load', function() { if (document.querySelector('input[name="commit"]')) { document.querySelector('input[name="commit"]').addEventListener('click', function() { gtag('event', 'click', { event_category: 'button', event_label: 'Log In' }) }) } }); </script> </div> <script>var _comscore = _comscore || []; _comscore.push({ c1: "2", c2: "26766707" }); (function() { var s = document.createElement("script"), el = document.getElementsByTagName("script")[0]; s.async = true; s.src = (document.location.protocol == "https:" ? "https://sb" : "http://b") + ".scorecardresearch.com/beacon.js"; el.parentNode.insertBefore(s, el); })();</script><img src="https://sb.scorecardresearch.com/p?c1=2&amp;c2=26766707&amp;cv=2.0&amp;cj=1" style="position: absolute; visibility: hidden" /> <div id='react-modal'></div> <div class='DesignSystem'> <a class='u-showOnFocus' href='#site'> Skip to main content </a> </div> <div id="upgrade_ie_banner" style="display: none;"><p>Academia.edu no longer supports Internet Explorer.</p><p>To browse Academia.edu and the wider internet faster and more securely, please take a few seconds to&nbsp;<a href="https://www.academia.edu/upgrade-browser">upgrade your browser</a>.</p></div><script>// Show this banner for all versions of IE if (!!window.MSInputMethodContext || /(MSIE)/.test(navigator.userAgent)) { document.getElementById('upgrade_ie_banner').style.display = 'block'; }</script> <div class="DesignSystem bootstrap ShrinkableNav"><div class="navbar navbar-default main-header"><div class="container-wrapper" id="main-header-container"><div class="container"><div class="navbar-header"><div class="nav-left-wrapper u-mt0x"><div class="nav-logo"><a data-main-header-link-target="logo_home" href="https://www.academia.edu/"><img class="visible-xs-inline-block" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015-A.svg" width="24" height="24" /><img width="145.2" height="18" class="hidden-xs" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015.svg" /></a></div><div class="nav-search"><div class="SiteSearch-wrapper select2-no-default-pills"><form class="js-SiteSearch-form DesignSystem" action="https://www.academia.edu/search" accept-charset="UTF-8" method="get"><input name="utf8" type="hidden" value="&#x2713;" autocomplete="off" /><i class="SiteSearch-icon fa fa-search u-fw700 u-positionAbsolute u-tcGrayDark"></i><input class="js-SiteSearch-form-input SiteSearch-form-input form-control" data-main-header-click-target="search_input" name="q" placeholder="Search" type="text" value="" /></form></div></div></div><div class="nav-right-wrapper pull-right"><ul class="NavLinks js-main-nav list-unstyled"><li class="NavLinks-link"><a class="js-header-login-url Button Button--inverseGray Button--sm u-mb4x" id="nav_log_in" rel="nofollow" href="https://www.academia.edu/login">Log In</a></li><li class="NavLinks-link u-p0x"><a class="Button Button--inverseGray Button--sm u-mb4x" rel="nofollow" href="https://www.academia.edu/signup">Sign Up</a></li></ul><button class="hidden-lg hidden-md hidden-sm u-ml4x navbar-toggle collapsed" data-target=".js-mobile-header-links" data-toggle="collapse" type="button"><span class="icon-bar"></span><span class="icon-bar"></span><span class="icon-bar"></span></button></div></div><div class="collapse navbar-collapse js-mobile-header-links"><ul class="nav navbar-nav"><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/login">Log In</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/signup">Sign Up</a></li><li class="u-borderColorGrayLight u-borderBottom1 js-mobile-nav-expand-trigger"><a href="#">more&nbsp<span class="caret"></span></a></li><li><ul class="js-mobile-nav-expand-section nav navbar-nav u-m0x collapse"><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/about">About</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/press">Press</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://medium.com/@academia">Blog</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/documents">Papers</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/hiring"><i class="fa fa-briefcase"></i>&nbsp;We're Hiring!</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://support.academia.edu/"><i class="fa fa-question-circle"></i>&nbsp;Help Center</a></li><li class="js-mobile-nav-collapse-trigger u-borderColorGrayLight u-borderBottom1 dropup" style="display:none"><a href="#">less&nbsp<span class="caret"></span></a></li></ul></li></ul></div></div></div><script>(function(){ var $moreLink = $(".js-mobile-nav-expand-trigger"); var $lessLink = $(".js-mobile-nav-collapse-trigger"); var $section = $('.js-mobile-nav-expand-section'); $moreLink.click(function(ev){ ev.preventDefault(); $moreLink.hide(); $lessLink.show(); $section.collapse('show'); }); $lessLink.click(function(ev){ ev.preventDefault(); $moreLink.show(); $lessLink.hide(); $section.collapse('hide'); }); })() if ($a.is_logged_in() || false) { new Aedu.NavigationController({ el: '.js-main-nav', showHighlightedNotification: false }); } else { $(".js-header-login-url").attr("href", $a.loginUrlWithRedirect()); } Aedu.autocompleteSearch = new AutocompleteSearch({el: '.js-SiteSearch-form'});</script></div></div> <div id='site' class='fixed'> <div id="content" class="clearfix"> <script>document.addEventListener('DOMContentLoaded', function(){ var $dismissible = $(".dismissible_banner"); $dismissible.click(function(ev) { $dismissible.hide(); }); });</script> <script src="//a.academia-assets.com/assets/webpack_bundles/profile.wjs-bundle-533ab26e53b28997c1064a839643ef40d866d497c665279ed26b743aacb62042.js" defer="defer"></script><script>Aedu.rankings = { showPaperRankingsLink: false } $viewedUser = Aedu.User.set_viewed( {"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth","photo":"/images/s65_no_pic.png","has_photo":false,"department":{"id":288396,"name":"Edmond and Lily Safra Center for Brain Sciences (ELSC)","url":"https://huji.academia.edu/Departments/Edmond_and_Lily_Safra_Center_for_Brain_Sciences_ELSC_/Documents","university":{"id":594,"name":"The Hebrew University of Jerusalem","url":"https://huji.academia.edu/"}},"position":"Graduate Student","position_id":3,"is_analytics_public":false,"interests":[{"id":161,"name":"Neuroscience","url":"https://www.academia.edu/Documents/in/Neuroscience"},{"id":5451,"name":"Computational Neuroscience","url":"https://www.academia.edu/Documents/in/Computational_Neuroscience"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":491655,"name":"Multivariate Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivariate_Pattern_Analysis"},{"id":49962,"name":"Visual Cortex","url":"https://www.academia.edu/Documents/in/Visual_Cortex"}]} ); if ($a.is_logged_in() && $viewedUser.is_current_user()) { $('body').addClass('profile-viewed-by-owner'); } $socialProfiles = []</script><div id="js-react-on-rails-context" style="display:none" data-rails-context="{&quot;inMailer&quot;:false,&quot;i18nLocale&quot;:&quot;en&quot;,&quot;i18nDefaultLocale&quot;:&quot;en&quot;,&quot;href&quot;:&quot;https://huji.academia.edu/ZviRoth&quot;,&quot;location&quot;:&quot;/ZviRoth&quot;,&quot;scheme&quot;:&quot;https&quot;,&quot;host&quot;:&quot;huji.academia.edu&quot;,&quot;port&quot;:null,&quot;pathname&quot;:&quot;/ZviRoth&quot;,&quot;search&quot;:null,&quot;httpAcceptLanguage&quot;:null,&quot;serverSide&quot;:false}"></div> <div class="js-react-on-rails-component" style="display:none" data-component-name="ProfileCheckPaperUpdate" data-props="{}" data-trace="false" data-dom-id="ProfileCheckPaperUpdate-react-component-81852839-f10e-496b-98f5-9ce842e15444"></div> <div id="ProfileCheckPaperUpdate-react-component-81852839-f10e-496b-98f5-9ce842e15444"></div> <div class="DesignSystem"><div class="onsite-ping" id="onsite-ping"></div></div><div class="profile-user-info DesignSystem"><div class="social-profile-container"><div class="left-panel-container"><div class="user-info-component-wrapper"><div class="user-summary-cta-container"><div class="user-summary-container"><div class="social-profile-avatar-container"><img class="profile-avatar u-positionAbsolute" border="0" alt="" src="//a.academia-assets.com/images/s200_no_pic.png" /></div><div class="title-container"><h1 class="ds2-5-heading-sans-serif-sm">Zvi N Roth</h1><div class="affiliations-container fake-truncate js-profile-affiliations"><div><a class="u-tcGrayDarker" href="https://huji.academia.edu/">The Hebrew University of Jerusalem</a>, <a class="u-tcGrayDarker" href="https://huji.academia.edu/Departments/Edmond_and_Lily_Safra_Center_for_Brain_Sciences_ELSC_/Documents">Edmond and Lily Safra Center for Brain Sciences (ELSC)</a>, <span class="u-tcGrayDarker">Graduate Student</span></div></div></div></div><div class="sidebar-cta-container"><button class="ds2-5-button hidden profile-cta-button grow js-profile-follow-button" data-broccoli-component="user-info.follow-button" data-click-track="profile-user-info-follow-button" data-follow-user-fname="Zvi" data-follow-user-id="32986989" data-follow-user-source="profile_button" data-has-google="false"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">add</span>Follow</button><button class="ds2-5-button hidden profile-cta-button grow js-profile-unfollow-button" data-broccoli-component="user-info.unfollow-button" data-click-track="profile-user-info-unfollow-button" data-unfollow-user-id="32986989"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">done</span>Following</button></div></div><div class="user-stats-container"><a><div class="stat-container js-profile-followers"><p class="label">Followers</p><p class="data">6</p></div></a><a><div class="stat-container js-profile-followees" data-broccoli-component="user-info.followees-count" data-click-track="profile-expand-user-info-following"><p class="label">Following</p><p class="data">12</p></div></a><a><div class="stat-container js-profile-coauthors" data-broccoli-component="user-info.coauthors-count" data-click-track="profile-expand-user-info-coauthors"><p class="label">Co-authors</p><p class="data">2</p></div></a><span><div class="stat-container"><p class="label"><span class="js-profile-total-view-text">Public Views</span></p><p class="data"><span class="js-profile-view-count"></span></p></div></span></div><div class="ri-section"><div class="ri-section-header"><span>Interests</span></div><div class="ri-tags-container"><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="32986989" href="https://www.academia.edu/Documents/in/Neuroscience"><div id="js-react-on-rails-context" style="display:none" data-rails-context="{&quot;inMailer&quot;:false,&quot;i18nLocale&quot;:&quot;en&quot;,&quot;i18nDefaultLocale&quot;:&quot;en&quot;,&quot;href&quot;:&quot;https://huji.academia.edu/ZviRoth&quot;,&quot;location&quot;:&quot;/ZviRoth&quot;,&quot;scheme&quot;:&quot;https&quot;,&quot;host&quot;:&quot;huji.academia.edu&quot;,&quot;port&quot;:null,&quot;pathname&quot;:&quot;/ZviRoth&quot;,&quot;search&quot;:null,&quot;httpAcceptLanguage&quot;:null,&quot;serverSide&quot;:false}"></div> <div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Neuroscience&quot;]}" data-trace="false" data-dom-id="Pill-react-component-502abb07-fcac-4461-a7e2-f438c9ef5a1c"></div> <div id="Pill-react-component-502abb07-fcac-4461-a7e2-f438c9ef5a1c"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="32986989" href="https://www.academia.edu/Documents/in/Computational_Neuroscience"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Computational Neuroscience&quot;]}" data-trace="false" data-dom-id="Pill-react-component-d4d40b89-1116-423c-8d58-3fd2f953e8bc"></div> <div id="Pill-react-component-d4d40b89-1116-423c-8d58-3fd2f953e8bc"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="32986989" href="https://www.academia.edu/Documents/in/FMRI"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;FMRI&quot;]}" data-trace="false" data-dom-id="Pill-react-component-c23b485a-f88e-484e-8c85-a7ed06d07cf4"></div> <div id="Pill-react-component-c23b485a-f88e-484e-8c85-a7ed06d07cf4"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="32986989" href="https://www.academia.edu/Documents/in/Multivariate_Pattern_Analysis"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Multivariate Pattern Analysis&quot;]}" data-trace="false" data-dom-id="Pill-react-component-8c84e358-e8ef-4912-8917-a7fb6a9f66b3"></div> <div id="Pill-react-component-8c84e358-e8ef-4912-8917-a7fb6a9f66b3"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="32986989" href="https://www.academia.edu/Documents/in/Visual_Cortex"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Visual Cortex&quot;]}" data-trace="false" data-dom-id="Pill-react-component-4fe9cf6b-088e-438c-a787-3049d7ae44ae"></div> <div id="Pill-react-component-4fe9cf6b-088e-438c-a787-3049d7ae44ae"></div> </a></div></div></div></div><div class="right-panel-container"><div class="user-content-wrapper"><div class="uploads-container" id="social-redesign-work-container"><div class="upload-header"><h2 class="ds2-5-heading-sans-serif-xs">Uploads</h2></div><div class="documents-container backbone-social-profile-documents" style="width: 100%;"><div class="u-taCenter"></div><div class="profile--tab_content_container js-tab-pane tab-pane active" id="all"><div class="profile--tab_heading_container js-section-heading" data-section="Papers" id="Papers"><h3 class="profile--tab_heading_container">Papers by Zvi N Roth</h3></div><div class="js-work-strip profile--work_container" data-work-id="23905336"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/23905336/Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System"><img alt="Research paper thumbnail of Functional MRI Representational Similarity Analysis Reveals a Dissociation between Discriminative and Relative Location Information in the Human Visual System" class="work-thumbnail" src="https://attachments.academia-assets.com/44294062/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/23905336/Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System">Functional MRI Representational Similarity Analysis Reveals a Dissociation between Discriminative and Relative Location Information in the Human Visual System</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Neural responses in visual cortex are governed by a topographic mapping from retinal locations to...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Neural responses in visual cortex are governed by a topographic mapping from retinal locations to cortical responses. Moreover, at the voxel population level early visual cortex (EVC) activity enables accurate decoding of stimuli locations. However, in many cases information enabling one to discriminate between locations (i.e., discriminative information) may be less relevant than information regarding the relative location of two objects (i.e., relative information). For example, when planning to grab a cup, determining whether the cup is located at the same retinal location as the hand is hardly relevant, whereas the location of the cup relative to the hand is crucial for performing the action. We have previously used multivariate pattern analysis techniques to measure discriminative location information, and found the highest levels in EVC, in line with other studies. Here we show, using representational similarity analysis, that availability of discriminative information in fMRI activation patterns does not entail availability of relative information. Specifically, we find that relative location information can be reliably extracted from activity patterns in posterior intraparietal sulcus (pIPS), but not from EVC, where we find the spatial representation to be warped. We further show that this variability in relative information levels between regions can be explained by a computational model based on an array of receptive fields. Moreover, when the model&#39;s receptive fields are extended to include inhibitory surround regions, the model can account for the spatial warping in EVC. These results demonstrate how size and shape properties of receptive fields in human visual cortex contribute to the transformation of discriminative spatial representations into relative spatial representations along the visual stream.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="5912bf83db3f564896921a3b6d6f6307" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:44294062,&quot;asset_id&quot;:23905336,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/44294062/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="23905336"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="23905336"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 23905336; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=23905336]").text(description); $(".js-view-count[data-work-id=23905336]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 23905336; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='23905336']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 23905336, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "5912bf83db3f564896921a3b6d6f6307" } } $('.js-work-strip[data-work-id=23905336]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":23905336,"title":"Functional MRI Representational Similarity Analysis Reveals a Dissociation between Discriminative and Relative Location Information in the Human Visual System","translated_title":"","metadata":{"abstract":"Neural responses in visual cortex are governed by a topographic mapping from retinal locations to cortical responses. Moreover, at the voxel population level early visual cortex (EVC) activity enables accurate decoding of stimuli locations. However, in many cases information enabling one to discriminate between locations (i.e., discriminative information) may be less relevant than information regarding the relative location of two objects (i.e., relative information). For example, when planning to grab a cup, determining whether the cup is located at the same retinal location as the hand is hardly relevant, whereas the location of the cup relative to the hand is crucial for performing the action. We have previously used multivariate pattern analysis techniques to measure discriminative location information, and found the highest levels in EVC, in line with other studies. Here we show, using representational similarity analysis, that availability of discriminative information in fMRI activation patterns does not entail availability of relative information. Specifically, we find that relative location information can be reliably extracted from activity patterns in posterior intraparietal sulcus (pIPS), but not from EVC, where we find the spatial representation to be warped. We further show that this variability in relative information levels between regions can be explained by a computational model based on an array of receptive fields. Moreover, when the model's receptive fields are extended to include inhibitory surround regions, the model can account for the spatial warping in EVC. These results demonstrate how size and shape properties of receptive fields in human visual cortex contribute to the transformation of discriminative spatial representations into relative spatial representations along the visual stream."},"translated_abstract":"Neural responses in visual cortex are governed by a topographic mapping from retinal locations to cortical responses. Moreover, at the voxel population level early visual cortex (EVC) activity enables accurate decoding of stimuli locations. However, in many cases information enabling one to discriminate between locations (i.e., discriminative information) may be less relevant than information regarding the relative location of two objects (i.e., relative information). For example, when planning to grab a cup, determining whether the cup is located at the same retinal location as the hand is hardly relevant, whereas the location of the cup relative to the hand is crucial for performing the action. We have previously used multivariate pattern analysis techniques to measure discriminative location information, and found the highest levels in EVC, in line with other studies. Here we show, using representational similarity analysis, that availability of discriminative information in fMRI activation patterns does not entail availability of relative information. Specifically, we find that relative location information can be reliably extracted from activity patterns in posterior intraparietal sulcus (pIPS), but not from EVC, where we find the spatial representation to be warped. We further show that this variability in relative information levels between regions can be explained by a computational model based on an array of receptive fields. Moreover, when the model's receptive fields are extended to include inhibitory surround regions, the model can account for the spatial warping in EVC. These results demonstrate how size and shape properties of receptive fields in human visual cortex contribute to the transformation of discriminative spatial representations into relative spatial representations along the visual stream.","internal_url":"https://www.academia.edu/23905336/Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System","translated_internal_url":"","created_at":"2016-04-01T06:09:14.291-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":32986989,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":44294062,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/44294062/thumbnails/1.jpg","file_name":"Roth_2016_Frontiers.pdf","download_url":"https://www.academia.edu/attachments/44294062/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Functional_MRI_Representational_Similari.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/44294062/Roth_2016_Frontiers-libre.pdf?1459516941=\u0026response-content-disposition=attachment%3B+filename%3DFunctional_MRI_Representational_Similari.pdf\u0026Expires=1732757870\u0026Signature=eRP-8MBKuvTWOae1ivqdZozjU6WwtTsiNzYliM~9ZmV4JoE6qQaHIujBnJ9Ao0A1L08fWVPUlAH7ZB-Pa~nlGcndmg4kR2seapEhNK2zpNMc77knbom5g9xPWG-l~fA8AGQePRTDd0RASgakUu14zckfyzpIgwJGspeGONqbvF8-erZE2hcmnSl01ICB2T6wZHFUgpi4FrIuTrVbOLOrlAYgu0PlofJLNBzSEEi-38OD89UhUO6Hyi5~5J88sR9NKibIU~IzC4xN9Cz4tv3T-upNIujkXDDOWRsQTtmQ0oznCXdYS3W2j8Qp81iCx1hIOvnxhX-1WkmPyDiLmKKFUw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System","translated_slug":"","page_count":16,"language":"en","content_type":"Work","owner":{"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth"},"attachments":[{"id":44294062,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/44294062/thumbnails/1.jpg","file_name":"Roth_2016_Frontiers.pdf","download_url":"https://www.academia.edu/attachments/44294062/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Functional_MRI_Representational_Similari.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/44294062/Roth_2016_Frontiers-libre.pdf?1459516941=\u0026response-content-disposition=attachment%3B+filename%3DFunctional_MRI_Representational_Similari.pdf\u0026Expires=1732757870\u0026Signature=eRP-8MBKuvTWOae1ivqdZozjU6WwtTsiNzYliM~9ZmV4JoE6qQaHIujBnJ9Ao0A1L08fWVPUlAH7ZB-Pa~nlGcndmg4kR2seapEhNK2zpNMc77knbom5g9xPWG-l~fA8AGQePRTDd0RASgakUu14zckfyzpIgwJGspeGONqbvF8-erZE2hcmnSl01ICB2T6wZHFUgpi4FrIuTrVbOLOrlAYgu0PlofJLNBzSEEi-38OD89UhUO6Hyi5~5J88sR9NKibIU~IzC4xN9Cz4tv3T-upNIujkXDDOWRsQTtmQ0oznCXdYS3W2j8Qp81iCx1hIOvnxhX-1WkmPyDiLmKKFUw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":4269,"name":"Multidimensional Scaling","url":"https://www.academia.edu/Documents/in/Multidimensional_Scaling"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":320260,"name":"MVPA","url":"https://www.academia.edu/Documents/in/MVPA"},{"id":491655,"name":"Multivariate Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivariate_Pattern_Analysis"},{"id":907492,"name":"Relative Location","url":"https://www.academia.edu/Documents/in/Relative_Location"},{"id":1952604,"name":"Representational Similarity Analysis","url":"https://www.academia.edu/Documents/in/Representational_Similarity_Analysis"}],"urls":[{"id":6966280,"url":"http://journal.frontiersin.org/article/10.3389/fnint.2016.00016/full"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="15195393"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/15195393/Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex"><img alt="Research paper thumbnail of Position Invariance and Object Affordances in Human Parietal Cortex" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/15195393/Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex">Position Invariance and Object Affordances in Human Parietal Cortex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/EhudZohary">Ehud Zohary</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a></span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="15195393"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="15195393"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 15195393; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=15195393]").text(description); $(".js-view-count[data-work-id=15195393]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 15195393; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='15195393']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 15195393, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=15195393]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":15195393,"title":"Position Invariance and Object Affordances in Human Parietal Cortex","translated_title":"","metadata":{},"translated_abstract":null,"internal_url":"https://www.academia.edu/15195393/Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex","translated_internal_url":"","created_at":"2015-08-26T07:02:33.925-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":34254952,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":5093555,"work_id":15195393,"tagging_user_id":34254952,"tagged_user_id":32986989,"co_author_invite_id":null,"email":"z***h@mail.huji.ac.il","affiliation":"The Hebrew University of Jerusalem","display_order":0,"name":"Zvi N Roth","title":"Position Invariance and Object Affordances in Human Parietal Cortex"}],"downloadable_attachments":[],"slug":"Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex","translated_slug":"","page_count":null,"language":"en","content_type":"Work","owner":{"id":34254952,"first_name":"Ehud","middle_initials":null,"last_name":"Zohary","page_name":"EhudZohary","domain_name":"huji","created_at":"2015-08-26T07:01:35.779-07:00","display_name":"Ehud Zohary","url":"https://huji.academia.edu/EhudZohary"},"attachments":[],"research_interests":[],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="15195317"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/15195317/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex"><img alt="Research paper thumbnail of Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex" class="work-thumbnail" src="https://attachments.academia-assets.com/43448198/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/15195317/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex">Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/EhudZohary">Ehud Zohary</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a></span></div><div class="wp-workCard_item"><span>Cerebral Cortex</span><span>, 2014</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a010c0192c619af200cbdfcd30a99438" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:43448198,&quot;asset_id&quot;:15195317,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/43448198/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="15195317"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="15195317"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 15195317; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=15195317]").text(description); $(".js-view-count[data-work-id=15195317]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 15195317; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='15195317']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 15195317, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "a010c0192c619af200cbdfcd30a99438" } } $('.js-work-strip[data-work-id=15195317]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":15195317,"title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex","translated_title":"","metadata":{"publication_date":{"day":null,"month":null,"year":2014,"errors":{}},"publication_name":"Cerebral Cortex"},"translated_abstract":null,"internal_url":"https://www.academia.edu/15195317/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_internal_url":"","created_at":"2015-08-26T07:02:22.143-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":34254952,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":5093552,"work_id":15195317,"tagging_user_id":34254952,"tagged_user_id":32986989,"co_author_invite_id":null,"email":"z***h@mail.huji.ac.il","affiliation":"The Hebrew University of Jerusalem","display_order":0,"name":"Zvi N Roth","title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex"},{"id":5253488,"work_id":15195317,"tagging_user_id":32986989,"tagged_user_id":33143101,"co_author_invite_id":null,"email":"u***z@cc.huji.ac.il","display_order":4194304,"name":"Udi Zohary","title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex"}],"downloadable_attachments":[{"id":43448198,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/43448198/thumbnails/1.jpg","file_name":"Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u.pdf","download_url":"https://www.academia.edu/attachments/43448198/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/43448198/Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u-libre.pdf?1457336575=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=Wq8y56NJU-QHRzM3ZyNRudtaHRa3ueHh~1YgpGxRawJBFsRwak5~Ahr0Qs06Vow5Qr3J2UjKI0V-rg~CHKbRNrz~1L1m9RMFmv0zCs790fbuoow2etDEM2eOYaL90RxEUD9ZvQ6pKffKU1Risn5yQuXv9t6ECY4dlpukhDLQgFhpZuY26xEHm~TpHjnAR2rrzboMgvNVXd27Aad~hXXxP4U6UDzMKec6F9XMgoLQaw0OAuiSQ2Dm5rRdlFwVn5p6d9kQUi8gIFZd5Ffi45e8rzHGqRDmNdBYt85fWGbO8k8KBN1JmbUsB1N8jqrUjWDCGtPe-XTwlEt-Z~UDkLxm0Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_slug":"","page_count":13,"language":"en","content_type":"Work","owner":{"id":34254952,"first_name":"Ehud","middle_initials":null,"last_name":"Zohary","page_name":"EhudZohary","domain_name":"huji","created_at":"2015-08-26T07:01:35.779-07:00","display_name":"Ehud Zohary","url":"https://huji.academia.edu/EhudZohary"},"attachments":[{"id":43448198,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/43448198/thumbnails/1.jpg","file_name":"Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u.pdf","download_url":"https://www.academia.edu/attachments/43448198/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/43448198/Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u-libre.pdf?1457336575=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=Wq8y56NJU-QHRzM3ZyNRudtaHRa3ueHh~1YgpGxRawJBFsRwak5~Ahr0Qs06Vow5Qr3J2UjKI0V-rg~CHKbRNrz~1L1m9RMFmv0zCs790fbuoow2etDEM2eOYaL90RxEUD9ZvQ6pKffKU1Risn5yQuXv9t6ECY4dlpukhDLQgFhpZuY26xEHm~TpHjnAR2rrzboMgvNVXd27Aad~hXXxP4U6UDzMKec6F9XMgoLQaw0OAuiSQ2Dm5rRdlFwVn5p6d9kQUi8gIFZd5Ffi45e8rzHGqRDmNdBYt85fWGbO8k8KBN1JmbUsB1N8jqrUjWDCGtPe-XTwlEt-Z~UDkLxm0Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":161,"name":"Neuroscience","url":"https://www.academia.edu/Documents/in/Neuroscience"},{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":49962,"name":"Visual Cortex","url":"https://www.academia.edu/Documents/in/Visual_Cortex"},{"id":78467,"name":"Cerebral Cortex","url":"https://www.academia.edu/Documents/in/Cerebral_Cortex"},{"id":312130,"name":"Multivoxel Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivoxel_Pattern_Analysis"},{"id":1239755,"name":"Neurosciences","url":"https://www.academia.edu/Documents/in/Neurosciences"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="15062995"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/15062995/Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex"><img alt="Research paper thumbnail of Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex" class="work-thumbnail" src="https://attachments.academia-assets.com/38529000/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/15062995/Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex">Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/EhudZohary">Ehud Zohary</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a></span></div><div class="wp-workCard_item"><span>The Journal of Neuroscience</span><span>, Aug 19, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Parietal cortex is often implicated in visual processing of actions. Action understanding is esse...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Parietal cortex is often implicated in visual processing of actions. Action understanding is essentially abstract, specific to the type or goal of action, but greatly independent of variations in the perceived position of the action. If certain parietal regions are involved in action understanding, then we expect them to show these generalization and selectivity properties. However, additional functions of parietal cortex, such as self-action control, may impose other demands by requiring an accurate representation of the location of graspable objects. Therefore, the dimensions along which responses are modulated may indicate the functional role of specific parietal regions. Here, we studied the degree of position invariance and hand/object specificity during viewing of tool-grasping actions. To that end, we characterize the information available about location, hand, and tool identity in the patterns of fMRI activation in various cortical areas: early visual cortex, posterior intraparietal sulcus, anterior superior parietal lobule, and the ventral object-specific lateral occipital complex. Our results suggest a gradient within the human dorsal stream: along the posterior–anterior axis, position information is gradually lost, whereas hand and tool identity information is enhanced. This may reflect a gradual transformation of visual input from an initial retinotopic representation in early visual areas to an abstract, position-invariant representation of viewed action in anterior parietal cortex.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="87a74f002103aafcf60c342b149272d0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:38529000,&quot;asset_id&quot;:15062995,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/38529000/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="15062995"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="15062995"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 15062995; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=15062995]").text(description); $(".js-view-count[data-work-id=15062995]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 15062995; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='15062995']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 15062995, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "87a74f002103aafcf60c342b149272d0" } } $('.js-work-strip[data-work-id=15062995]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":15062995,"title":"Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex","translated_title":"","metadata":{"abstract":"Parietal cortex is often implicated in visual processing of actions. Action understanding is essentially abstract, specific to the type or goal of action, but greatly independent of variations in the perceived position of the action. If certain parietal regions are involved in action understanding, then we expect them to show these generalization and selectivity properties. However, additional functions of parietal cortex, such as self-action control, may impose other demands by requiring an accurate representation of the location of graspable objects. Therefore, the dimensions along which responses are modulated may indicate the functional role of specific parietal regions. Here, we studied the degree of position invariance and hand/object specificity during viewing of tool-grasping actions. To that end, we characterize the information available about location, hand, and tool identity in the patterns of fMRI activation in various cortical areas: early visual cortex, posterior intraparietal sulcus, anterior superior parietal lobule, and the ventral object-specific lateral occipital complex. Our results suggest a gradient within the human dorsal stream: along the posterior–anterior axis, position information is gradually lost, whereas hand and tool identity information is enhanced. This may reflect a gradual transformation of visual input from an initial retinotopic representation in early visual areas to an abstract, position-invariant representation of viewed action in anterior parietal cortex.","publication_date":{"day":19,"month":8,"year":2015,"errors":{}},"publication_name":"The Journal of Neuroscience"},"translated_abstract":"Parietal cortex is often implicated in visual processing of actions. Action understanding is essentially abstract, specific to the type or goal of action, but greatly independent of variations in the perceived position of the action. If certain parietal regions are involved in action understanding, then we expect them to show these generalization and selectivity properties. However, additional functions of parietal cortex, such as self-action control, may impose other demands by requiring an accurate representation of the location of graspable objects. Therefore, the dimensions along which responses are modulated may indicate the functional role of specific parietal regions. Here, we studied the degree of position invariance and hand/object specificity during viewing of tool-grasping actions. To that end, we characterize the information available about location, hand, and tool identity in the patterns of fMRI activation in various cortical areas: early visual cortex, posterior intraparietal sulcus, anterior superior parietal lobule, and the ventral object-specific lateral occipital complex. Our results suggest a gradient within the human dorsal stream: along the posterior–anterior axis, position information is gradually lost, whereas hand and tool identity information is enhanced. This may reflect a gradual transformation of visual input from an initial retinotopic representation in early visual areas to an abstract, position-invariant representation of viewed action in anterior parietal cortex.","internal_url":"https://www.academia.edu/15062995/Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex","translated_internal_url":"","created_at":"2015-08-20T15:44:07.044-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":32986989,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":4870832,"work_id":15062995,"tagging_user_id":32986989,"tagged_user_id":34254952,"co_author_invite_id":1107324,"email":"u***z@mail.huji.ac.il","affiliation":"The Hebrew University of Jerusalem","display_order":-1,"name":"Ehud Zohary","title":"Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex"}],"downloadable_attachments":[{"id":38529000,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38529000/thumbnails/1.jpg","file_name":"Roth___Zohary_2015_JNS.pdf","download_url":"https://www.academia.edu/attachments/38529000/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Position_and_Identity_Information_Availa.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38529000/Roth___Zohary_2015_JNS-libre.pdf?1440110539=\u0026response-content-disposition=attachment%3B+filename%3DPosition_and_Identity_Information_Availa.pdf\u0026Expires=1732757870\u0026Signature=aPAsyfEP~66se2TuJXT-gfYu-KdDGi-DIDiefmldThXXv-fE5lT2bW5xhTZXv51xXMP2CDH6yBhsSLtF1VHfOOR~-keTCsp0YHSaVXkCGA00RkGRyxFREyvvCdpaJQy1AwNkmNFemQqH5IwCeZS9anoqPoGo11hLChlmODb41Ktn6ELtFoqKDWNZpCNMsjm679K1GW9aSgg19RtbAMUyGnFrLA4d3Y8MtMCnTCPpSHk67saeVz7Tq~Ez8HLUx8l7btcKIpP5STEIuzxAcxmyeY3EUnXfDDeWT8GnMbLnDvbH2gA2eF8HOT3j8e9FF24CYXt~1qnyrNrrjJTgxbZRgw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex","translated_slug":"","page_count":13,"language":"en","content_type":"Work","owner":{"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth"},"attachments":[{"id":38529000,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38529000/thumbnails/1.jpg","file_name":"Roth___Zohary_2015_JNS.pdf","download_url":"https://www.academia.edu/attachments/38529000/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Position_and_Identity_Information_Availa.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38529000/Roth___Zohary_2015_JNS-libre.pdf?1440110539=\u0026response-content-disposition=attachment%3B+filename%3DPosition_and_Identity_Information_Availa.pdf\u0026Expires=1732757870\u0026Signature=aPAsyfEP~66se2TuJXT-gfYu-KdDGi-DIDiefmldThXXv-fE5lT2bW5xhTZXv51xXMP2CDH6yBhsSLtF1VHfOOR~-keTCsp0YHSaVXkCGA00RkGRyxFREyvvCdpaJQy1AwNkmNFemQqH5IwCeZS9anoqPoGo11hLChlmODb41Ktn6ELtFoqKDWNZpCNMsjm679K1GW9aSgg19RtbAMUyGnFrLA4d3Y8MtMCnTCPpSHk67saeVz7Tq~Ez8HLUx8l7btcKIpP5STEIuzxAcxmyeY3EUnXfDDeWT8GnMbLnDvbH2gA2eF8HOT3j8e9FF24CYXt~1qnyrNrrjJTgxbZRgw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":2229,"name":"Vision Science","url":"https://www.academia.edu/Documents/in/Vision_Science"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":300467,"name":"Brain Decoding","url":"https://www.academia.edu/Documents/in/Brain_Decoding"},{"id":320260,"name":"MVPA","url":"https://www.academia.edu/Documents/in/MVPA"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="13925582"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/13925582/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex"><img alt="Research paper thumbnail of Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex" class="work-thumbnail" src="https://attachments.academia-assets.com/38160688/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/13925582/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex">Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/UdiZohary">Udi Zohary</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">One feature of visual processing in the ventral stream is that cortical responses gradually depar...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">One feature of visual processing in the ventral stream is that cortical responses gradually depart from the physical aspects of the visual stimulus and become correlated with perceptual experience. Thus, unlike early retinotopic areas, the responses in the object-related lateral occipital complex (LOC) are typically immune to parameter changes (e.g., contrast, location, etc.) when these do not affect recognition. Here, we use a complementary approach to highlight changes in brain activity following a shift in the perceptual state (in the absence of any alteration in the physical image). Specifically, we focus on LOC and early visual cortex (EVC) and compare their functional magnetic resonance imaging (fMRI) responses to degraded object images, before and after fast perceptual learning that renders initially unrecognized objects identifiable. Using 3 complementary analyses, we find that, in LOC, unlike EVC, learned recognition is associated with a change in the multivoxel response pattern to degraded object images, such that the response becomes significantly more correlated with that evoked by the intact version of the same image. This provides further evidence that the coding in LOC reflects the recognition of visual objects.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="bd3f6ba0723fd5293ef92aa66a435734" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:38160688,&quot;asset_id&quot;:13925582,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/38160688/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="13925582"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="13925582"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 13925582; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=13925582]").text(description); $(".js-view-count[data-work-id=13925582]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 13925582; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='13925582']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 13925582, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "bd3f6ba0723fd5293ef92aa66a435734" } } $('.js-work-strip[data-work-id=13925582]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":13925582,"title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex","translated_title":"","metadata":{"abstract":"One feature of visual processing in the ventral stream is that cortical responses gradually depart from the physical aspects of the visual stimulus and become correlated with perceptual experience. Thus, unlike early retinotopic areas, the responses in the object-related lateral occipital complex (LOC) are typically immune to parameter changes (e.g., contrast, location, etc.) when these do not affect recognition. Here, we use a complementary approach to highlight changes in brain activity following a shift in the perceptual state (in the absence of any alteration in the physical image). Specifically, we focus on LOC and early visual cortex (EVC) and compare their functional magnetic resonance imaging (fMRI) responses to degraded object images, before and after fast perceptual learning that renders initially unrecognized objects identifiable. Using 3 complementary analyses, we find that, in LOC, unlike EVC, learned recognition is associated with a change in the multivoxel response pattern to degraded object images, such that the response becomes significantly more correlated with that evoked by the intact version of the same image. This provides further evidence that the coding in LOC reflects the recognition of visual objects."},"translated_abstract":"One feature of visual processing in the ventral stream is that cortical responses gradually depart from the physical aspects of the visual stimulus and become correlated with perceptual experience. Thus, unlike early retinotopic areas, the responses in the object-related lateral occipital complex (LOC) are typically immune to parameter changes (e.g., contrast, location, etc.) when these do not affect recognition. Here, we use a complementary approach to highlight changes in brain activity following a shift in the perceptual state (in the absence of any alteration in the physical image). Specifically, we focus on LOC and early visual cortex (EVC) and compare their functional magnetic resonance imaging (fMRI) responses to degraded object images, before and after fast perceptual learning that renders initially unrecognized objects identifiable. Using 3 complementary analyses, we find that, in LOC, unlike EVC, learned recognition is associated with a change in the multivoxel response pattern to degraded object images, such that the response becomes significantly more correlated with that evoked by the intact version of the same image. This provides further evidence that the coding in LOC reflects the recognition of visual objects.","internal_url":"https://www.academia.edu/13925582/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_internal_url":"","created_at":"2015-07-11T13:14:45.033-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":32986989,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":2891805,"work_id":13925582,"tagging_user_id":32986989,"tagged_user_id":33143101,"co_author_invite_id":745179,"email":"u***z@cc.huji.ac.il","display_order":4194304,"name":"Udi Zohary","title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex"}],"downloadable_attachments":[{"id":38160688,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38160688/thumbnails/1.jpg","file_name":"Cereb._Cortex-2014-Roth-cercor-bhu042.pdf","download_url":"https://www.academia.edu/attachments/38160688/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38160688/Cereb._Cortex-2014-Roth-cercor-bhu042-libre.pdf?1436646182=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=UhTaunLWVKch2-NrCAfijHvm4IN-QEQL6T4OH2JuVmD81RGmRB2h3j4lJQq99Z22-16Sjn~t3-IapukJAJsSyEErUoPn4voNDTkGKlbVCn~egZINZfh80XLfDBQ6uA5~GH26lAU32iuz037AWZN-gC0tj8J2b~pD7qVGdch6SRKGhruAe~U9sNJ-C7XszqezI62trIH~jw37eDZD7lF3PK952hdSLPKUCURx1jnmssc4-MyFwMXhjHW3Zxv6UfyIeNUOarti4o7FgxhzAEtIVwRTEVfFMk4XqlaQBpgkpGM4rS5tzxnbZcRq~HFId3KgSPsn3ZM3tmNICLBEwFDF2g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_slug":"","page_count":13,"language":"en","content_type":"Work","owner":{"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth"},"attachments":[{"id":38160688,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38160688/thumbnails/1.jpg","file_name":"Cereb._Cortex-2014-Roth-cercor-bhu042.pdf","download_url":"https://www.academia.edu/attachments/38160688/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38160688/Cereb._Cortex-2014-Roth-cercor-bhu042-libre.pdf?1436646182=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=UhTaunLWVKch2-NrCAfijHvm4IN-QEQL6T4OH2JuVmD81RGmRB2h3j4lJQq99Z22-16Sjn~t3-IapukJAJsSyEErUoPn4voNDTkGKlbVCn~egZINZfh80XLfDBQ6uA5~GH26lAU32iuz037AWZN-gC0tj8J2b~pD7qVGdch6SRKGhruAe~U9sNJ-C7XszqezI62trIH~jw37eDZD7lF3PK952hdSLPKUCURx1jnmssc4-MyFwMXhjHW3Zxv6UfyIeNUOarti4o7FgxhzAEtIVwRTEVfFMk4XqlaQBpgkpGM4rS5tzxnbZcRq~HFId3KgSPsn3ZM3tmNICLBEwFDF2g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":161,"name":"Neuroscience","url":"https://www.academia.edu/Documents/in/Neuroscience"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":49962,"name":"Visual Cortex","url":"https://www.academia.edu/Documents/in/Visual_Cortex"},{"id":312130,"name":"Multivoxel Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivoxel_Pattern_Analysis"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="3200201" id="papers"><div class="js-work-strip profile--work_container" data-work-id="23905336"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/23905336/Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System"><img alt="Research paper thumbnail of Functional MRI Representational Similarity Analysis Reveals a Dissociation between Discriminative and Relative Location Information in the Human Visual System" class="work-thumbnail" src="https://attachments.academia-assets.com/44294062/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/23905336/Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System">Functional MRI Representational Similarity Analysis Reveals a Dissociation between Discriminative and Relative Location Information in the Human Visual System</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Neural responses in visual cortex are governed by a topographic mapping from retinal locations to...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Neural responses in visual cortex are governed by a topographic mapping from retinal locations to cortical responses. Moreover, at the voxel population level early visual cortex (EVC) activity enables accurate decoding of stimuli locations. However, in many cases information enabling one to discriminate between locations (i.e., discriminative information) may be less relevant than information regarding the relative location of two objects (i.e., relative information). For example, when planning to grab a cup, determining whether the cup is located at the same retinal location as the hand is hardly relevant, whereas the location of the cup relative to the hand is crucial for performing the action. We have previously used multivariate pattern analysis techniques to measure discriminative location information, and found the highest levels in EVC, in line with other studies. Here we show, using representational similarity analysis, that availability of discriminative information in fMRI activation patterns does not entail availability of relative information. Specifically, we find that relative location information can be reliably extracted from activity patterns in posterior intraparietal sulcus (pIPS), but not from EVC, where we find the spatial representation to be warped. We further show that this variability in relative information levels between regions can be explained by a computational model based on an array of receptive fields. Moreover, when the model&#39;s receptive fields are extended to include inhibitory surround regions, the model can account for the spatial warping in EVC. These results demonstrate how size and shape properties of receptive fields in human visual cortex contribute to the transformation of discriminative spatial representations into relative spatial representations along the visual stream.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="5912bf83db3f564896921a3b6d6f6307" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:44294062,&quot;asset_id&quot;:23905336,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/44294062/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="23905336"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="23905336"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 23905336; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=23905336]").text(description); $(".js-view-count[data-work-id=23905336]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 23905336; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='23905336']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 23905336, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "5912bf83db3f564896921a3b6d6f6307" } } $('.js-work-strip[data-work-id=23905336]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":23905336,"title":"Functional MRI Representational Similarity Analysis Reveals a Dissociation between Discriminative and Relative Location Information in the Human Visual System","translated_title":"","metadata":{"abstract":"Neural responses in visual cortex are governed by a topographic mapping from retinal locations to cortical responses. Moreover, at the voxel population level early visual cortex (EVC) activity enables accurate decoding of stimuli locations. However, in many cases information enabling one to discriminate between locations (i.e., discriminative information) may be less relevant than information regarding the relative location of two objects (i.e., relative information). For example, when planning to grab a cup, determining whether the cup is located at the same retinal location as the hand is hardly relevant, whereas the location of the cup relative to the hand is crucial for performing the action. We have previously used multivariate pattern analysis techniques to measure discriminative location information, and found the highest levels in EVC, in line with other studies. Here we show, using representational similarity analysis, that availability of discriminative information in fMRI activation patterns does not entail availability of relative information. Specifically, we find that relative location information can be reliably extracted from activity patterns in posterior intraparietal sulcus (pIPS), but not from EVC, where we find the spatial representation to be warped. We further show that this variability in relative information levels between regions can be explained by a computational model based on an array of receptive fields. Moreover, when the model's receptive fields are extended to include inhibitory surround regions, the model can account for the spatial warping in EVC. These results demonstrate how size and shape properties of receptive fields in human visual cortex contribute to the transformation of discriminative spatial representations into relative spatial representations along the visual stream."},"translated_abstract":"Neural responses in visual cortex are governed by a topographic mapping from retinal locations to cortical responses. Moreover, at the voxel population level early visual cortex (EVC) activity enables accurate decoding of stimuli locations. However, in many cases information enabling one to discriminate between locations (i.e., discriminative information) may be less relevant than information regarding the relative location of two objects (i.e., relative information). For example, when planning to grab a cup, determining whether the cup is located at the same retinal location as the hand is hardly relevant, whereas the location of the cup relative to the hand is crucial for performing the action. We have previously used multivariate pattern analysis techniques to measure discriminative location information, and found the highest levels in EVC, in line with other studies. Here we show, using representational similarity analysis, that availability of discriminative information in fMRI activation patterns does not entail availability of relative information. Specifically, we find that relative location information can be reliably extracted from activity patterns in posterior intraparietal sulcus (pIPS), but not from EVC, where we find the spatial representation to be warped. We further show that this variability in relative information levels between regions can be explained by a computational model based on an array of receptive fields. Moreover, when the model's receptive fields are extended to include inhibitory surround regions, the model can account for the spatial warping in EVC. These results demonstrate how size and shape properties of receptive fields in human visual cortex contribute to the transformation of discriminative spatial representations into relative spatial representations along the visual stream.","internal_url":"https://www.academia.edu/23905336/Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System","translated_internal_url":"","created_at":"2016-04-01T06:09:14.291-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":32986989,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":44294062,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/44294062/thumbnails/1.jpg","file_name":"Roth_2016_Frontiers.pdf","download_url":"https://www.academia.edu/attachments/44294062/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Functional_MRI_Representational_Similari.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/44294062/Roth_2016_Frontiers-libre.pdf?1459516941=\u0026response-content-disposition=attachment%3B+filename%3DFunctional_MRI_Representational_Similari.pdf\u0026Expires=1732757870\u0026Signature=eRP-8MBKuvTWOae1ivqdZozjU6WwtTsiNzYliM~9ZmV4JoE6qQaHIujBnJ9Ao0A1L08fWVPUlAH7ZB-Pa~nlGcndmg4kR2seapEhNK2zpNMc77knbom5g9xPWG-l~fA8AGQePRTDd0RASgakUu14zckfyzpIgwJGspeGONqbvF8-erZE2hcmnSl01ICB2T6wZHFUgpi4FrIuTrVbOLOrlAYgu0PlofJLNBzSEEi-38OD89UhUO6Hyi5~5J88sR9NKibIU~IzC4xN9Cz4tv3T-upNIujkXDDOWRsQTtmQ0oznCXdYS3W2j8Qp81iCx1hIOvnxhX-1WkmPyDiLmKKFUw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Functional_MRI_Representational_Similarity_Analysis_Reveals_a_Dissociation_between_Discriminative_and_Relative_Location_Information_in_the_Human_Visual_System","translated_slug":"","page_count":16,"language":"en","content_type":"Work","owner":{"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth"},"attachments":[{"id":44294062,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/44294062/thumbnails/1.jpg","file_name":"Roth_2016_Frontiers.pdf","download_url":"https://www.academia.edu/attachments/44294062/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Functional_MRI_Representational_Similari.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/44294062/Roth_2016_Frontiers-libre.pdf?1459516941=\u0026response-content-disposition=attachment%3B+filename%3DFunctional_MRI_Representational_Similari.pdf\u0026Expires=1732757870\u0026Signature=eRP-8MBKuvTWOae1ivqdZozjU6WwtTsiNzYliM~9ZmV4JoE6qQaHIujBnJ9Ao0A1L08fWVPUlAH7ZB-Pa~nlGcndmg4kR2seapEhNK2zpNMc77knbom5g9xPWG-l~fA8AGQePRTDd0RASgakUu14zckfyzpIgwJGspeGONqbvF8-erZE2hcmnSl01ICB2T6wZHFUgpi4FrIuTrVbOLOrlAYgu0PlofJLNBzSEEi-38OD89UhUO6Hyi5~5J88sR9NKibIU~IzC4xN9Cz4tv3T-upNIujkXDDOWRsQTtmQ0oznCXdYS3W2j8Qp81iCx1hIOvnxhX-1WkmPyDiLmKKFUw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":4269,"name":"Multidimensional Scaling","url":"https://www.academia.edu/Documents/in/Multidimensional_Scaling"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":320260,"name":"MVPA","url":"https://www.academia.edu/Documents/in/MVPA"},{"id":491655,"name":"Multivariate Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivariate_Pattern_Analysis"},{"id":907492,"name":"Relative Location","url":"https://www.academia.edu/Documents/in/Relative_Location"},{"id":1952604,"name":"Representational Similarity Analysis","url":"https://www.academia.edu/Documents/in/Representational_Similarity_Analysis"}],"urls":[{"id":6966280,"url":"http://journal.frontiersin.org/article/10.3389/fnint.2016.00016/full"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="15195393"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/15195393/Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex"><img alt="Research paper thumbnail of Position Invariance and Object Affordances in Human Parietal Cortex" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/15195393/Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex">Position Invariance and Object Affordances in Human Parietal Cortex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/EhudZohary">Ehud Zohary</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a></span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="15195393"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="15195393"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 15195393; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=15195393]").text(description); $(".js-view-count[data-work-id=15195393]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 15195393; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='15195393']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 15195393, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=15195393]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":15195393,"title":"Position Invariance and Object Affordances in Human Parietal Cortex","translated_title":"","metadata":{},"translated_abstract":null,"internal_url":"https://www.academia.edu/15195393/Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex","translated_internal_url":"","created_at":"2015-08-26T07:02:33.925-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":34254952,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":5093555,"work_id":15195393,"tagging_user_id":34254952,"tagged_user_id":32986989,"co_author_invite_id":null,"email":"z***h@mail.huji.ac.il","affiliation":"The Hebrew University of Jerusalem","display_order":0,"name":"Zvi N Roth","title":"Position Invariance and Object Affordances in Human Parietal Cortex"}],"downloadable_attachments":[],"slug":"Position_Invariance_and_Object_Affordances_in_Human_Parietal_Cortex","translated_slug":"","page_count":null,"language":"en","content_type":"Work","owner":{"id":34254952,"first_name":"Ehud","middle_initials":null,"last_name":"Zohary","page_name":"EhudZohary","domain_name":"huji","created_at":"2015-08-26T07:01:35.779-07:00","display_name":"Ehud Zohary","url":"https://huji.academia.edu/EhudZohary"},"attachments":[],"research_interests":[],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="15195317"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/15195317/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex"><img alt="Research paper thumbnail of Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex" class="work-thumbnail" src="https://attachments.academia-assets.com/43448198/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/15195317/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex">Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/EhudZohary">Ehud Zohary</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a></span></div><div class="wp-workCard_item"><span>Cerebral Cortex</span><span>, 2014</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a010c0192c619af200cbdfcd30a99438" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:43448198,&quot;asset_id&quot;:15195317,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/43448198/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="15195317"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="15195317"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 15195317; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=15195317]").text(description); $(".js-view-count[data-work-id=15195317]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 15195317; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='15195317']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 15195317, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "a010c0192c619af200cbdfcd30a99438" } } $('.js-work-strip[data-work-id=15195317]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":15195317,"title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex","translated_title":"","metadata":{"publication_date":{"day":null,"month":null,"year":2014,"errors":{}},"publication_name":"Cerebral Cortex"},"translated_abstract":null,"internal_url":"https://www.academia.edu/15195317/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_internal_url":"","created_at":"2015-08-26T07:02:22.143-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":34254952,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":5093552,"work_id":15195317,"tagging_user_id":34254952,"tagged_user_id":32986989,"co_author_invite_id":null,"email":"z***h@mail.huji.ac.il","affiliation":"The Hebrew University of Jerusalem","display_order":0,"name":"Zvi N Roth","title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex"},{"id":5253488,"work_id":15195317,"tagging_user_id":32986989,"tagged_user_id":33143101,"co_author_invite_id":null,"email":"u***z@cc.huji.ac.il","display_order":4194304,"name":"Udi Zohary","title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex"}],"downloadable_attachments":[{"id":43448198,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/43448198/thumbnails/1.jpg","file_name":"Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u.pdf","download_url":"https://www.academia.edu/attachments/43448198/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/43448198/Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u-libre.pdf?1457336575=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=Wq8y56NJU-QHRzM3ZyNRudtaHRa3ueHh~1YgpGxRawJBFsRwak5~Ahr0Qs06Vow5Qr3J2UjKI0V-rg~CHKbRNrz~1L1m9RMFmv0zCs790fbuoow2etDEM2eOYaL90RxEUD9ZvQ6pKffKU1Risn5yQuXv9t6ECY4dlpukhDLQgFhpZuY26xEHm~TpHjnAR2rrzboMgvNVXd27Aad~hXXxP4U6UDzMKec6F9XMgoLQaw0OAuiSQ2Dm5rRdlFwVn5p6d9kQUi8gIFZd5Ffi45e8rzHGqRDmNdBYt85fWGbO8k8KBN1JmbUsB1N8jqrUjWDCGtPe-XTwlEt-Z~UDkLxm0Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_slug":"","page_count":13,"language":"en","content_type":"Work","owner":{"id":34254952,"first_name":"Ehud","middle_initials":null,"last_name":"Zohary","page_name":"EhudZohary","domain_name":"huji","created_at":"2015-08-26T07:01:35.779-07:00","display_name":"Ehud Zohary","url":"https://huji.academia.edu/EhudZohary"},"attachments":[{"id":43448198,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/43448198/thumbnails/1.jpg","file_name":"Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u.pdf","download_url":"https://www.academia.edu/attachments/43448198/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/43448198/Fingerprints_of_Learned_Object_Recogniti20160306-26607-12138u-libre.pdf?1457336575=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=Wq8y56NJU-QHRzM3ZyNRudtaHRa3ueHh~1YgpGxRawJBFsRwak5~Ahr0Qs06Vow5Qr3J2UjKI0V-rg~CHKbRNrz~1L1m9RMFmv0zCs790fbuoow2etDEM2eOYaL90RxEUD9ZvQ6pKffKU1Risn5yQuXv9t6ECY4dlpukhDLQgFhpZuY26xEHm~TpHjnAR2rrzboMgvNVXd27Aad~hXXxP4U6UDzMKec6F9XMgoLQaw0OAuiSQ2Dm5rRdlFwVn5p6d9kQUi8gIFZd5Ffi45e8rzHGqRDmNdBYt85fWGbO8k8KBN1JmbUsB1N8jqrUjWDCGtPe-XTwlEt-Z~UDkLxm0Q__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":161,"name":"Neuroscience","url":"https://www.academia.edu/Documents/in/Neuroscience"},{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":49962,"name":"Visual Cortex","url":"https://www.academia.edu/Documents/in/Visual_Cortex"},{"id":78467,"name":"Cerebral Cortex","url":"https://www.academia.edu/Documents/in/Cerebral_Cortex"},{"id":312130,"name":"Multivoxel Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivoxel_Pattern_Analysis"},{"id":1239755,"name":"Neurosciences","url":"https://www.academia.edu/Documents/in/Neurosciences"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="15062995"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/15062995/Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex"><img alt="Research paper thumbnail of Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex" class="work-thumbnail" src="https://attachments.academia-assets.com/38529000/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/15062995/Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex">Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/EhudZohary">Ehud Zohary</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a></span></div><div class="wp-workCard_item"><span>The Journal of Neuroscience</span><span>, Aug 19, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Parietal cortex is often implicated in visual processing of actions. Action understanding is esse...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Parietal cortex is often implicated in visual processing of actions. Action understanding is essentially abstract, specific to the type or goal of action, but greatly independent of variations in the perceived position of the action. If certain parietal regions are involved in action understanding, then we expect them to show these generalization and selectivity properties. However, additional functions of parietal cortex, such as self-action control, may impose other demands by requiring an accurate representation of the location of graspable objects. Therefore, the dimensions along which responses are modulated may indicate the functional role of specific parietal regions. Here, we studied the degree of position invariance and hand/object specificity during viewing of tool-grasping actions. To that end, we characterize the information available about location, hand, and tool identity in the patterns of fMRI activation in various cortical areas: early visual cortex, posterior intraparietal sulcus, anterior superior parietal lobule, and the ventral object-specific lateral occipital complex. Our results suggest a gradient within the human dorsal stream: along the posterior–anterior axis, position information is gradually lost, whereas hand and tool identity information is enhanced. This may reflect a gradual transformation of visual input from an initial retinotopic representation in early visual areas to an abstract, position-invariant representation of viewed action in anterior parietal cortex.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="87a74f002103aafcf60c342b149272d0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:38529000,&quot;asset_id&quot;:15062995,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/38529000/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="15062995"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="15062995"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 15062995; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=15062995]").text(description); $(".js-view-count[data-work-id=15062995]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 15062995; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='15062995']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 15062995, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "87a74f002103aafcf60c342b149272d0" } } $('.js-work-strip[data-work-id=15062995]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":15062995,"title":"Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex","translated_title":"","metadata":{"abstract":"Parietal cortex is often implicated in visual processing of actions. Action understanding is essentially abstract, specific to the type or goal of action, but greatly independent of variations in the perceived position of the action. If certain parietal regions are involved in action understanding, then we expect them to show these generalization and selectivity properties. However, additional functions of parietal cortex, such as self-action control, may impose other demands by requiring an accurate representation of the location of graspable objects. Therefore, the dimensions along which responses are modulated may indicate the functional role of specific parietal regions. Here, we studied the degree of position invariance and hand/object specificity during viewing of tool-grasping actions. To that end, we characterize the information available about location, hand, and tool identity in the patterns of fMRI activation in various cortical areas: early visual cortex, posterior intraparietal sulcus, anterior superior parietal lobule, and the ventral object-specific lateral occipital complex. Our results suggest a gradient within the human dorsal stream: along the posterior–anterior axis, position information is gradually lost, whereas hand and tool identity information is enhanced. This may reflect a gradual transformation of visual input from an initial retinotopic representation in early visual areas to an abstract, position-invariant representation of viewed action in anterior parietal cortex.","publication_date":{"day":19,"month":8,"year":2015,"errors":{}},"publication_name":"The Journal of Neuroscience"},"translated_abstract":"Parietal cortex is often implicated in visual processing of actions. Action understanding is essentially abstract, specific to the type or goal of action, but greatly independent of variations in the perceived position of the action. If certain parietal regions are involved in action understanding, then we expect them to show these generalization and selectivity properties. However, additional functions of parietal cortex, such as self-action control, may impose other demands by requiring an accurate representation of the location of graspable objects. Therefore, the dimensions along which responses are modulated may indicate the functional role of specific parietal regions. Here, we studied the degree of position invariance and hand/object specificity during viewing of tool-grasping actions. To that end, we characterize the information available about location, hand, and tool identity in the patterns of fMRI activation in various cortical areas: early visual cortex, posterior intraparietal sulcus, anterior superior parietal lobule, and the ventral object-specific lateral occipital complex. Our results suggest a gradient within the human dorsal stream: along the posterior–anterior axis, position information is gradually lost, whereas hand and tool identity information is enhanced. This may reflect a gradual transformation of visual input from an initial retinotopic representation in early visual areas to an abstract, position-invariant representation of viewed action in anterior parietal cortex.","internal_url":"https://www.academia.edu/15062995/Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex","translated_internal_url":"","created_at":"2015-08-20T15:44:07.044-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":32986989,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":4870832,"work_id":15062995,"tagging_user_id":32986989,"tagged_user_id":34254952,"co_author_invite_id":1107324,"email":"u***z@mail.huji.ac.il","affiliation":"The Hebrew University of Jerusalem","display_order":-1,"name":"Ehud Zohary","title":"Position and Identity Information Available in fMRI Patterns of Activity in Human Visual Cortex"}],"downloadable_attachments":[{"id":38529000,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38529000/thumbnails/1.jpg","file_name":"Roth___Zohary_2015_JNS.pdf","download_url":"https://www.academia.edu/attachments/38529000/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Position_and_Identity_Information_Availa.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38529000/Roth___Zohary_2015_JNS-libre.pdf?1440110539=\u0026response-content-disposition=attachment%3B+filename%3DPosition_and_Identity_Information_Availa.pdf\u0026Expires=1732757870\u0026Signature=aPAsyfEP~66se2TuJXT-gfYu-KdDGi-DIDiefmldThXXv-fE5lT2bW5xhTZXv51xXMP2CDH6yBhsSLtF1VHfOOR~-keTCsp0YHSaVXkCGA00RkGRyxFREyvvCdpaJQy1AwNkmNFemQqH5IwCeZS9anoqPoGo11hLChlmODb41Ktn6ELtFoqKDWNZpCNMsjm679K1GW9aSgg19RtbAMUyGnFrLA4d3Y8MtMCnTCPpSHk67saeVz7Tq~Ez8HLUx8l7btcKIpP5STEIuzxAcxmyeY3EUnXfDDeWT8GnMbLnDvbH2gA2eF8HOT3j8e9FF24CYXt~1qnyrNrrjJTgxbZRgw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Position_and_Identity_Information_Available_in_fMRI_Patterns_of_Activity_in_Human_Visual_Cortex","translated_slug":"","page_count":13,"language":"en","content_type":"Work","owner":{"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth"},"attachments":[{"id":38529000,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38529000/thumbnails/1.jpg","file_name":"Roth___Zohary_2015_JNS.pdf","download_url":"https://www.academia.edu/attachments/38529000/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Position_and_Identity_Information_Availa.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38529000/Roth___Zohary_2015_JNS-libre.pdf?1440110539=\u0026response-content-disposition=attachment%3B+filename%3DPosition_and_Identity_Information_Availa.pdf\u0026Expires=1732757870\u0026Signature=aPAsyfEP~66se2TuJXT-gfYu-KdDGi-DIDiefmldThXXv-fE5lT2bW5xhTZXv51xXMP2CDH6yBhsSLtF1VHfOOR~-keTCsp0YHSaVXkCGA00RkGRyxFREyvvCdpaJQy1AwNkmNFemQqH5IwCeZS9anoqPoGo11hLChlmODb41Ktn6ELtFoqKDWNZpCNMsjm679K1GW9aSgg19RtbAMUyGnFrLA4d3Y8MtMCnTCPpSHk67saeVz7Tq~Ez8HLUx8l7btcKIpP5STEIuzxAcxmyeY3EUnXfDDeWT8GnMbLnDvbH2gA2eF8HOT3j8e9FF24CYXt~1qnyrNrrjJTgxbZRgw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":2229,"name":"Vision Science","url":"https://www.academia.edu/Documents/in/Vision_Science"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":300467,"name":"Brain Decoding","url":"https://www.academia.edu/Documents/in/Brain_Decoding"},{"id":320260,"name":"MVPA","url":"https://www.academia.edu/Documents/in/MVPA"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="13925582"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/13925582/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex"><img alt="Research paper thumbnail of Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex" class="work-thumbnail" src="https://attachments.academia-assets.com/38160688/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/13925582/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex">Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://huji.academia.edu/ZviRoth">Zvi N Roth</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/UdiZohary">Udi Zohary</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">One feature of visual processing in the ventral stream is that cortical responses gradually depar...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">One feature of visual processing in the ventral stream is that cortical responses gradually depart from the physical aspects of the visual stimulus and become correlated with perceptual experience. Thus, unlike early retinotopic areas, the responses in the object-related lateral occipital complex (LOC) are typically immune to parameter changes (e.g., contrast, location, etc.) when these do not affect recognition. Here, we use a complementary approach to highlight changes in brain activity following a shift in the perceptual state (in the absence of any alteration in the physical image). Specifically, we focus on LOC and early visual cortex (EVC) and compare their functional magnetic resonance imaging (fMRI) responses to degraded object images, before and after fast perceptual learning that renders initially unrecognized objects identifiable. Using 3 complementary analyses, we find that, in LOC, unlike EVC, learned recognition is associated with a change in the multivoxel response pattern to degraded object images, such that the response becomes significantly more correlated with that evoked by the intact version of the same image. This provides further evidence that the coding in LOC reflects the recognition of visual objects.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="bd3f6ba0723fd5293ef92aa66a435734" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:38160688,&quot;asset_id&quot;:13925582,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/38160688/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="13925582"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="13925582"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 13925582; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=13925582]").text(description); $(".js-view-count[data-work-id=13925582]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 13925582; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='13925582']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 13925582, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "bd3f6ba0723fd5293ef92aa66a435734" } } $('.js-work-strip[data-work-id=13925582]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":13925582,"title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex","translated_title":"","metadata":{"abstract":"One feature of visual processing in the ventral stream is that cortical responses gradually depart from the physical aspects of the visual stimulus and become correlated with perceptual experience. Thus, unlike early retinotopic areas, the responses in the object-related lateral occipital complex (LOC) are typically immune to parameter changes (e.g., contrast, location, etc.) when these do not affect recognition. Here, we use a complementary approach to highlight changes in brain activity following a shift in the perceptual state (in the absence of any alteration in the physical image). Specifically, we focus on LOC and early visual cortex (EVC) and compare their functional magnetic resonance imaging (fMRI) responses to degraded object images, before and after fast perceptual learning that renders initially unrecognized objects identifiable. Using 3 complementary analyses, we find that, in LOC, unlike EVC, learned recognition is associated with a change in the multivoxel response pattern to degraded object images, such that the response becomes significantly more correlated with that evoked by the intact version of the same image. This provides further evidence that the coding in LOC reflects the recognition of visual objects."},"translated_abstract":"One feature of visual processing in the ventral stream is that cortical responses gradually depart from the physical aspects of the visual stimulus and become correlated with perceptual experience. Thus, unlike early retinotopic areas, the responses in the object-related lateral occipital complex (LOC) are typically immune to parameter changes (e.g., contrast, location, etc.) when these do not affect recognition. Here, we use a complementary approach to highlight changes in brain activity following a shift in the perceptual state (in the absence of any alteration in the physical image). Specifically, we focus on LOC and early visual cortex (EVC) and compare their functional magnetic resonance imaging (fMRI) responses to degraded object images, before and after fast perceptual learning that renders initially unrecognized objects identifiable. Using 3 complementary analyses, we find that, in LOC, unlike EVC, learned recognition is associated with a change in the multivoxel response pattern to degraded object images, such that the response becomes significantly more correlated with that evoked by the intact version of the same image. This provides further evidence that the coding in LOC reflects the recognition of visual objects.","internal_url":"https://www.academia.edu/13925582/Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_internal_url":"","created_at":"2015-07-11T13:14:45.033-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":32986989,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":2891805,"work_id":13925582,"tagging_user_id":32986989,"tagged_user_id":33143101,"co_author_invite_id":745179,"email":"u***z@cc.huji.ac.il","display_order":4194304,"name":"Udi Zohary","title":"Fingerprints of Learned Object Recognition Seen in the fMRI Activation Patterns of Lateral Occipital Complex"}],"downloadable_attachments":[{"id":38160688,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38160688/thumbnails/1.jpg","file_name":"Cereb._Cortex-2014-Roth-cercor-bhu042.pdf","download_url":"https://www.academia.edu/attachments/38160688/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38160688/Cereb._Cortex-2014-Roth-cercor-bhu042-libre.pdf?1436646182=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=UhTaunLWVKch2-NrCAfijHvm4IN-QEQL6T4OH2JuVmD81RGmRB2h3j4lJQq99Z22-16Sjn~t3-IapukJAJsSyEErUoPn4voNDTkGKlbVCn~egZINZfh80XLfDBQ6uA5~GH26lAU32iuz037AWZN-gC0tj8J2b~pD7qVGdch6SRKGhruAe~U9sNJ-C7XszqezI62trIH~jw37eDZD7lF3PK952hdSLPKUCURx1jnmssc4-MyFwMXhjHW3Zxv6UfyIeNUOarti4o7FgxhzAEtIVwRTEVfFMk4XqlaQBpgkpGM4rS5tzxnbZcRq~HFId3KgSPsn3ZM3tmNICLBEwFDF2g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Fingerprints_of_Learned_Object_Recognition_Seen_in_the_fMRI_Activation_Patterns_of_Lateral_Occipital_Complex","translated_slug":"","page_count":13,"language":"en","content_type":"Work","owner":{"id":32986989,"first_name":"Zvi","middle_initials":"N","last_name":"Roth","page_name":"ZviRoth","domain_name":"huji","created_at":"2015-07-11T10:51:32.782-07:00","display_name":"Zvi N Roth","url":"https://huji.academia.edu/ZviRoth"},"attachments":[{"id":38160688,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/38160688/thumbnails/1.jpg","file_name":"Cereb._Cortex-2014-Roth-cercor-bhu042.pdf","download_url":"https://www.academia.edu/attachments/38160688/download_file?st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&st=MTczMjc1NDI3MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fingerprints_of_Learned_Object_Recogniti.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/38160688/Cereb._Cortex-2014-Roth-cercor-bhu042-libre.pdf?1436646182=\u0026response-content-disposition=attachment%3B+filename%3DFingerprints_of_Learned_Object_Recogniti.pdf\u0026Expires=1732757870\u0026Signature=UhTaunLWVKch2-NrCAfijHvm4IN-QEQL6T4OH2JuVmD81RGmRB2h3j4lJQq99Z22-16Sjn~t3-IapukJAJsSyEErUoPn4voNDTkGKlbVCn~egZINZfh80XLfDBQ6uA5~GH26lAU32iuz037AWZN-gC0tj8J2b~pD7qVGdch6SRKGhruAe~U9sNJ-C7XszqezI62trIH~jw37eDZD7lF3PK952hdSLPKUCURx1jnmssc4-MyFwMXhjHW3Zxv6UfyIeNUOarti4o7FgxhzAEtIVwRTEVfFMk4XqlaQBpgkpGM4rS5tzxnbZcRq~HFId3KgSPsn3ZM3tmNICLBEwFDF2g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":161,"name":"Neuroscience","url":"https://www.academia.edu/Documents/in/Neuroscience"},{"id":29917,"name":"FMRI","url":"https://www.academia.edu/Documents/in/FMRI"},{"id":49962,"name":"Visual Cortex","url":"https://www.academia.edu/Documents/in/Visual_Cortex"},{"id":312130,"name":"Multivoxel Pattern Analysis","url":"https://www.academia.edu/Documents/in/Multivoxel_Pattern_Analysis"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/google_contacts-0dfb882d836b94dbcb4a2d123d6933fc9533eda5be911641f20b4eb428429600.js"], function() { // from javascript_helper.rb $('.js-google-connect-button').click(function(e) { e.preventDefault(); GoogleContacts.authorize_and_show_contacts(); Aedu.Dismissibles.recordClickthrough("WowProfileImportContactsPrompt"); }); $('.js-update-biography-button').click(function(e) { e.preventDefault(); Aedu.Dismissibles.recordClickthrough("UpdateUserBiographyPrompt"); $.ajax({ url: $r.api_v0_profiles_update_about_path({ subdomain_param: 'api', about: "", }), type: 'PUT', success: function(response) { location.reload(); } }); }); $('.js-work-creator-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_document_path({ source: encodeURIComponent(""), }); }); $('.js-video-upload-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_video_path({ source: encodeURIComponent(""), }); }); $('.js-do-this-later-button').click(function() { $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("WowProfileImportContactsPrompt"); }); $('.js-update-biography-do-this-later-button').click(function(){ $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("UpdateUserBiographyPrompt"); }); $('.wow-profile-mentions-upsell--close').click(function(){ $('.wow-profile-mentions-upsell--panel').hide(); Aedu.Dismissibles.recordDismissal("WowProfileMentionsUpsell"); }); $('.wow-profile-mentions-upsell--button').click(function(){ Aedu.Dismissibles.recordClickthrough("WowProfileMentionsUpsell"); }); new WowProfile.SocialRedesignUserWorks({ initialWorksOffset: 20, allWorksOffset: 20, maxSections: 1 }) }); </script> </div></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile_edit-5ea339ee107c863779f560dd7275595239fed73f1a13d279d2b599a28c0ecd33.js","https://a.academia-assets.com/assets/add_coauthor-22174b608f9cb871d03443cafa7feac496fb50d7df2d66a53f5ee3c04ba67f53.js","https://a.academia-assets.com/assets/tab-dcac0130902f0cc2d8cb403714dd47454f11fc6fb0e99ae6a0827b06613abc20.js","https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js"], function() { // from javascript_helper.rb window.ae = window.ae || {}; window.ae.WowProfile = window.ae.WowProfile || {}; if(Aedu.User.current && Aedu.User.current.id === $viewedUser.id) { window.ae.WowProfile.current_user_edit = {}; new WowProfileEdit.EditUploadView({ el: '.js-edit-upload-button-wrapper', model: window.$current_user, }); new AddCoauthor.AddCoauthorsController(); } var userInfoView = new WowProfile.SocialRedesignUserInfo({ recaptcha_key: "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB" }); WowProfile.router = new WowProfile.Router({ userInfoView: userInfoView }); Backbone.history.start({ pushState: true, root: "/" + $viewedUser.page_name }); new WowProfile.UserWorksNav() }); </script> </div> <div class="bootstrap login"><div class="modal fade login-modal" id="login-modal"><div class="login-modal-dialog modal-dialog"><div class="modal-content"><div class="modal-header"><button class="close close" data-dismiss="modal" type="button"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button><h4 class="modal-title text-center"><strong>Log In</strong></h4></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><button class="btn btn-fb btn-lg btn-block btn-v-center-content" id="login-facebook-oauth-button"><svg style="float: left; width: 19px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="facebook-square" class="svg-inline--fa fa-facebook-square fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M400 32H48A48 48 0 0 0 0 80v352a48 48 0 0 0 48 48h137.25V327.69h-63V256h63v-54.64c0-62.15 37-96.48 93.67-96.48 27.14 0 55.52 4.84 55.52 4.84v61h-31.27c-30.81 0-40.42 19.12-40.42 38.73V256h68.78l-11 71.69h-57.78V480H400a48 48 0 0 0 48-48V80a48 48 0 0 0-48-48z"></path></svg><small><strong>Log in</strong> with <strong>Facebook</strong></small></button><br /><button class="btn btn-google btn-lg btn-block btn-v-center-content" id="login-google-oauth-button"><svg style="float: left; width: 22px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="google-plus" class="svg-inline--fa fa-google-plus fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M256,8C119.1,8,8,119.1,8,256S119.1,504,256,504,504,392.9,504,256,392.9,8,256,8ZM185.3,380a124,124,0,0,1,0-248c31.3,0,60.1,11,83,32.3l-33.6,32.6c-13.2-12.9-31.3-19.1-49.4-19.1-42.9,0-77.2,35.5-77.2,78.1S142.3,334,185.3,334c32.6,0,64.9-19.1,70.1-53.3H185.3V238.1H302.2a109.2,109.2,0,0,1,1.9,20.7c0,70.8-47.5,121.2-118.8,121.2ZM415.5,273.8v35.5H380V273.8H344.5V238.3H380V202.8h35.5v35.5h35.2v35.5Z"></path></svg><small><strong>Log in</strong> with <strong>Google</strong></small></button><br /><style type="text/css">.sign-in-with-apple-button { width: 100%; height: 52px; border-radius: 3px; border: 1px solid black; cursor: pointer; }</style><script src="https://appleid.cdn-apple.com/appleauth/static/jsapi/appleid/1/en_US/appleid.auth.js" type="text/javascript"></script><div class="sign-in-with-apple-button" data-border="false" data-color="white" id="appleid-signin"><span &nbsp;&nbsp;="Sign Up with Apple" class="u-fs11"></span></div><script>AppleID.auth.init({ clientId: 'edu.academia.applesignon', scope: 'name email', redirectURI: 'https://www.academia.edu/sessions', state: "b1cf3d9091b98b02abfa6396fcf7ef58dc3f7255db48845d72a6779bca145f8d", });</script><script>// Hacky way of checking if on fast loswp if (window.loswp == null) { (function() { const Google = window?.Aedu?.Auth?.OauthButton?.Login?.Google; const Facebook = window?.Aedu?.Auth?.OauthButton?.Login?.Facebook; if (Google) { new Google({ el: '#login-google-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } if (Facebook) { new Facebook({ el: '#login-facebook-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } })(); }</script></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><div class="hr-heading login-hr-heading"><span class="hr-heading-text">or</span></div></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><form class="js-login-form" action="https://www.academia.edu/sessions" accept-charset="UTF-8" method="post"><input name="utf8" type="hidden" value="&#x2713;" autocomplete="off" /><input type="hidden" name="authenticity_token" value="syxpgCwCGJtMid3z1auwwwskp+QildsoAMEkSYD3LjhRML/lyPWKBQenN8e6zQXd6z6HU04nuSaKlUpUE4oo4A==" autocomplete="off" /><div class="form-group"><label class="control-label" for="login-modal-email-input" style="font-size: 14px;">Email</label><input class="form-control" id="login-modal-email-input" name="login" type="email" /></div><div class="form-group"><label class="control-label" for="login-modal-password-input" style="font-size: 14px;">Password</label><input class="form-control" id="login-modal-password-input" name="password" type="password" /></div><input type="hidden" name="post_login_redirect_url" id="post_login_redirect_url" value="https://huji.academia.edu/ZviRoth" autocomplete="off" /><div class="checkbox"><label><input type="checkbox" name="remember_me" id="remember_me" value="1" checked="checked" /><small style="font-size: 12px; margin-top: 2px; display: inline-block;">Remember me on this computer</small></label></div><br><input type="submit" name="commit" value="Log In" class="btn btn-primary btn-block btn-lg js-login-submit" data-disable-with="Log In" /></br></form><script>typeof window?.Aedu?.recaptchaManagedForm === 'function' && window.Aedu.recaptchaManagedForm( document.querySelector('.js-login-form'), document.querySelector('.js-login-submit') );</script><small style="font-size: 12px;"><br />or <a data-target="#login-modal-reset-password-container" data-toggle="collapse" href="javascript:void(0)">reset password</a></small><div class="collapse" id="login-modal-reset-password-container"><br /><div class="well margin-0x"><form class="js-password-reset-form" action="https://www.academia.edu/reset_password" accept-charset="UTF-8" method="post"><input name="utf8" type="hidden" value="&#x2713;" autocomplete="off" /><input type="hidden" name="authenticity_token" value="4sLzsdjGiyr3M5dmGI/EvbdVZUpIOwDfZyN824KKGpkA3iXUPDEZtLwdfVJ36XGjV09F/SSJYtHtdxLGEfccQQ==" autocomplete="off" /><p>Enter the email address you signed up with and we&#39;ll email you a reset link.</p><div class="form-group"><input class="form-control" name="email" type="email" /></div><script src="https://recaptcha.net/recaptcha/api.js" async defer></script> <script> var invisibleRecaptchaSubmit = function () { var closestForm = function (ele) { var curEle = ele.parentNode; while (curEle.nodeName !== 'FORM' && curEle.nodeName !== 'BODY'){ curEle = curEle.parentNode; } return curEle.nodeName === 'FORM' ? curEle : null }; var eles = document.getElementsByClassName('g-recaptcha'); if (eles.length > 0) { var form = closestForm(eles[0]); if (form) { form.submit(); } } }; </script> <input type="submit" data-sitekey="6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj" data-callback="invisibleRecaptchaSubmit" class="g-recaptcha btn btn-primary btn-block" value="Email me a link" value=""/> </form></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/collapse-45805421cf446ca5adf7aaa1935b08a3a8d1d9a6cc5d91a62a2a3a00b20b3e6a.js"], function() { // from javascript_helper.rb $("#login-modal-reset-password-container").on("shown.bs.collapse", function() { $(this).find("input[type=email]").focus(); }); }); </script> </div></div></div><div class="modal-footer"><div class="text-center"><small style="font-size: 12px;">Need an account?&nbsp;<a rel="nofollow" href="https://www.academia.edu/signup">Click here to sign up</a></small></div></div></div></div></div></div><script>// If we are on subdomain or non-bootstrapped page, redirect to login page instead of showing modal (function(){ if (typeof $ === 'undefined') return; var host = window.location.hostname; if ((host === $domain || host === "www."+$domain) && (typeof $().modal === 'function')) { $("#nav_log_in").click(function(e) { // Don't follow the link and open the modal e.preventDefault(); $("#login-modal").on('shown.bs.modal', function() { $(this).find("#login-modal-email-input").focus() }).modal('show'); }); } })()</script> <div class="bootstrap" id="footer"><div class="footer-content clearfix text-center padding-top-7x" style="width:100%;"><ul class="footer-links-secondary footer-links-wide list-inline margin-bottom-1x"><li><a href="https://www.academia.edu/about">About</a></li><li><a href="https://www.academia.edu/press">Press</a></li><li><a rel="nofollow" href="https://medium.com/academia">Blog</a></li><li><a href="https://www.academia.edu/documents">Papers</a></li><li><a href="https://www.academia.edu/topics">Topics</a></li><li><a href="https://www.academia.edu/journals">Academia.edu Journals</a></li><li><a rel="nofollow" href="https://www.academia.edu/hiring"><svg style="width: 13px; height: 13px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="briefcase" class="svg-inline--fa fa-briefcase fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M320 336c0 8.84-7.16 16-16 16h-96c-8.84 0-16-7.16-16-16v-48H0v144c0 25.6 22.4 48 48 48h416c25.6 0 48-22.4 48-48V288H320v48zm144-208h-80V80c0-25.6-22.4-48-48-48H176c-25.6 0-48 22.4-48 48v48H48c-25.6 0-48 22.4-48 48v80h512v-80c0-25.6-22.4-48-48-48zm-144 0H192V96h128v32z"></path></svg>&nbsp;<strong>We're Hiring!</strong></a></li><li><a rel="nofollow" href="https://support.academia.edu/"><svg style="width: 12px; height: 12px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="question-circle" class="svg-inline--fa fa-question-circle fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M504 256c0 136.997-111.043 248-248 248S8 392.997 8 256C8 119.083 119.043 8 256 8s248 111.083 248 248zM262.655 90c-54.497 0-89.255 22.957-116.549 63.758-3.536 5.286-2.353 12.415 2.715 16.258l34.699 26.31c5.205 3.947 12.621 3.008 16.665-2.122 17.864-22.658 30.113-35.797 57.303-35.797 20.429 0 45.698 13.148 45.698 32.958 0 14.976-12.363 22.667-32.534 33.976C247.128 238.528 216 254.941 216 296v4c0 6.627 5.373 12 12 12h56c6.627 0 12-5.373 12-12v-1.333c0-28.462 83.186-29.647 83.186-106.667 0-58.002-60.165-102-116.531-102zM256 338c-25.365 0-46 20.635-46 46 0 25.364 20.635 46 46 46s46-20.636 46-46c0-25.365-20.635-46-46-46z"></path></svg>&nbsp;<strong>Help Center</strong></a></li></ul><ul class="footer-links-tertiary list-inline margin-bottom-1x"><li class="small">Find new research papers in:</li><li class="small"><a href="https://www.academia.edu/Documents/in/Physics">Physics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Chemistry">Chemistry</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Biology">Biology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Health_Sciences">Health Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Ecology">Ecology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Earth_Sciences">Earth Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></li></ul></div></div><div class="DesignSystem" id="credit" style="width:100%;"><ul class="u-pl0x footer-links-legal list-inline"><li><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li>Academia &copy;2024</li></ul></div><script> //<![CDATA[ window.detect_gmtoffset = true; window.Academia && window.Academia.set_gmtoffset && Academia.set_gmtoffset('/gmtoffset'); //]]> </script> <div id='overlay_background'></div> <div id='bootstrap-modal-container' class='bootstrap'></div> <div id='ds-modal-container' class='bootstrap DesignSystem'></div> <div id='full-screen-modal'></div> </div> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10