<!-- NOTE(review): the two stray text lines from the original capture
     ("CINXE.COM" shell marker and a duplicated page title) are preserved
     below as comments. As bare text before the doctype they would force
     the parser out of standards mode; comments before the doctype do not. -->
<!-- CINXE.COM -->
<!-- (PDF) MRC Cognition and Brain Sciences Unit -->
<!DOCTYPE html>
<!-- lang="en" is grounded by this page's JSON-LD ("inLanguage":"en"). -->
<html lang="en">
<head>
  <meta charset="utf-8">
  <!-- Fixed: OpenSearch autodiscovery must use <link>, not <meta> —
       <meta> has no rel/href attributes, so the original tag was inert. -->
  <link rel="search" type="application/opensearchdescription+xml" href="/open_search.xml" title="Academia.edu">
  <meta content="width=device-width, initial-scale=1" name="viewport">
  <meta name="google-site-verification" content="bKJMBZA7E43xhDOopFZkssMMkBRjvYERV-NaN4R6mrs">
  <meta name="csrf-param" content="authenticity_token">
  <meta name="csrf-token" content="F9eDvyeV17XbcmD1K4udKy0DCb2BgwluCwbbiWx3Da0afta5BSXivGclw3eMuU9RbppJCr1GYQksvPB7m4TSvQ">
  <!-- Google Scholar / Highwire citation metadata -->
  <meta name="citation_title" content="MRC Cognition and Brain Sciences Unit">
  <meta name="citation_author" content="Ingrid Johnsrude">
  <!-- Twitter card metadata -->
  <meta name="twitter:card" content="summary">
  <meta name="twitter:url" content="https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit">
  <meta name="twitter:title" content="MRC Cognition and Brain Sciences Unit">
  <meta name="twitter:description" content="Speech comprehension is resistant to acoustic distortion in the input, reflecting listeners&#39; ability to adjust perceptual processes to match the speech input. For noise-vocoded sentences, a manipulation that removes spectral detail from speech,">
  <meta name="twitter:image" content="https://0.academia-photos.com/47847299/14978817/15725171/s200_ingrid.johnsrude.jpg">
  <!-- Open Graph metadata -->
  <meta property="fb:app_id" content="2369844204">
  <meta property="og:type" content="article">
  <meta property="og:url" content="https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit">
  <meta property="og:title" content="MRC Cognition and Brain Sciences Unit">
  <!-- Upgraded og:image from http:// to https:// — an insecure image URL on an
       https page is rejected/downgraded by some OG consumers. NOTE(review):
       other academia-assets.com subdomains in this page already serve https;
       confirm a.academia-assets.com does too. -->
  <meta property="og:image" content="https://a.academia-assets.com/images/open-graph-icons/fb-paper.gif">
  <meta property="og:description" content="Speech comprehension is resistant to acoustic distortion in the input, reflecting listeners&#39; ability to adjust perceptual processes to match the speech input. 
For noise-vocoded sentences, a manipulation that removes spectral detail from speech," /> <meta property="article:author" content="https://independent.academia.edu/IJohnsrude" /> <meta name="description" content="Speech comprehension is resistant to acoustic distortion in the input, reflecting listeners&#39; ability to adjust perceptual processes to match the speech input. For noise-vocoded sentences, a manipulation that removes spectral detail from speech," /> <title>(PDF) MRC Cognition and Brain Sciences Unit</title> <link rel="canonical" href="https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit" /> <script async src="https://www.googletagmanager.com/gtag/js?id=G-5VKX33P2DS"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-5VKX33P2DS', { cookie_domain: 'academia.edu', send_page_view: false, }); gtag('event', 'page_view', { 'controller': "single_work", 'action': "show", 'controller_action': 'single_work#show', 'logged_in': 'false', 'edge': 'unknown', // Send nil if there is no A/B test bucket, in case some records get logged // with missing data - that way we can distinguish between the two cases. 
// ab_test_bucket should be of the form <ab_test_name>:<bucket> 'ab_test_bucket': null, }) </script> <script> var $controller_name = 'single_work'; var $action_name = "show"; var $rails_env = 'production'; var $app_rev = 'b092bf3a3df71cf13feee7c143e83a57eb6b94fb'; var $domain = 'academia.edu'; var $app_host = "academia.edu"; var $asset_host = "academia-assets.com"; var $start_time = new Date().getTime(); var $recaptcha_key = "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB"; var $recaptcha_invisible_key = "6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj"; var $disableClientRecordHit = false; </script> <script> window.require = { config: function() { return function() {} } } </script> <script> window.Aedu = window.Aedu || {}; window.Aedu.hit_data = null; window.Aedu.serverRenderTime = new Date(1739822259000); window.Aedu.timeDifference = new Date().getTime() - 1739822259000; </script> <script type="application/ld+json">{"@context":"https://schema.org","@type":"ScholarlyArticle","author":[{"@context":"https://schema.org","@type":"Person","name":"Ingrid Johnsrude","url":"https://independent.academia.edu/IJohnsrude"}],"contributor":[],"dateCreated":"2022-08-07","headline":"MRC Cognition and Brain Sciences Unit","image":"https://attachments.academia-assets.com/89359807/thumbnails/1.jpg","inLanguage":"en","keywords":["Emotion","Control","Attention","Executive Control","Distributed Systems","Attentional Blink","Process Algebra","Threat","Startle"],"publisher":{"@context":"https://schema.org","@type":"Organization","name":null},"sourceOrganization":[{"@context":"https://schema.org","@type":"EducationalOrganization","name":null}],"thumbnailUrl":"https://attachments.academia-assets.com/89359807/thumbnails/1.jpg","url":"https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit"}</script><style type="text/css">@media(max-width: 567px){:root{--token-mode: Rebrand;--dropshadow: 0 2px 4px 0 #22223340;--primary-brand: #0645b1;--error-dark: #b60000;--success-dark: 
#05b01c;--inactive-fill: #ebebee;--hover: #0c3b8d;--pressed: #082f75;--button-primary-fill-inactive: #ebebee;--button-primary-fill: #0645b1;--button-primary-text: #ffffff;--button-primary-fill-hover: #0c3b8d;--button-primary-fill-press: #082f75;--button-primary-icon: #ffffff;--button-primary-fill-inverse: #ffffff;--button-primary-text-inverse: #082f75;--button-primary-icon-inverse: #0645b1;--button-primary-fill-inverse-hover: #cddaef;--button-primary-stroke-inverse-pressed: #0645b1;--button-secondary-stroke-inactive: #b1b1ba;--button-secondary-fill: #eef2f9;--button-secondary-text: #082f75;--button-secondary-fill-press: #cddaef;--button-secondary-fill-inactive: #ebebee;--button-secondary-stroke: #cddaef;--button-secondary-stroke-hover: #386ac1;--button-secondary-stroke-press: #0645b1;--button-secondary-text-inactive: #b1b1ba;--button-secondary-icon: #082f75;--button-secondary-fill-hover: #e6ecf7;--button-secondary-stroke-inverse: #ffffff;--button-secondary-fill-inverse: rgba(255, 255, 255, 0);--button-secondary-icon-inverse: #ffffff;--button-secondary-icon-hover: #082f75;--button-secondary-icon-press: #082f75;--button-secondary-text-inverse: #ffffff;--button-secondary-text-hover: #082f75;--button-secondary-text-press: #082f75;--button-secondary-fill-inverse-hover: #043059;--button-xs-stroke: #141413;--button-xs-stroke-hover: #0c3b8d;--button-xs-stroke-press: #082f75;--button-xs-stroke-inactive: #ebebee;--button-xs-text: #141413;--button-xs-text-hover: #0c3b8d;--button-xs-text-press: #082f75;--button-xs-text-inactive: #91919e;--button-xs-icon: #141413;--button-xs-icon-hover: #0c3b8d;--button-xs-icon-press: #082f75;--button-xs-icon-inactive: #91919e;--button-xs-fill: #ffffff;--button-xs-fill-hover: #f4f7fc;--button-xs-fill-press: #eef2f9;--buttons-button-text-inactive: #91919e;--buttons-button-focus: #0645b1;--buttons-button-icon-inactive: #91919e;--buttons-small-buttons-corner-radius: 16px;--buttons-small-buttons-l-r-padding: 20px;--buttons-small-buttons-height: 
48px;--buttons-small-buttons-gap: 8px;--buttons-small-buttons-icon-only-width: 48px;--buttons-small-buttons-icon-size: 20px;--buttons-small-buttons-stroke-default: 1px;--buttons-small-buttons-stroke-thick: 2px;--buttons-large-buttons-l-r-padding: 32px;--buttons-large-buttons-height: 64px;--buttons-large-buttons-icon-only-width: 64px;--buttons-large-buttons-icon-size: 20px;--buttons-large-buttons-gap: 8px;--buttons-large-buttons-corner-radius: 16px;--buttons-large-buttons-stroke-default: 1px;--buttons-large-buttons-stroke-thick: 2px;--buttons-extra-small-buttons-l-r-padding: 8px;--buttons-extra-small-buttons-height: 32px;--buttons-extra-small-buttons-icon-size: 16px;--buttons-extra-small-buttons-gap: 4px;--buttons-extra-small-buttons-corner-radius: 8px;--buttons-stroke-default: 1px;--buttons-stroke-thick: 2px;--background-beige: #f9f7f4;--error-light: #fff2f2;--text-placeholder: #6d6d7d;--stroke-dark: #141413;--stroke-light: #dddde2;--stroke-medium: #535366;--accent-green: #ccffd4;--accent-turquoise: #ccf7ff;--accent-yellow: #f7ffcc;--accent-peach: #ffd4cc;--accent-violet: #f7ccff;--accent-purple: #f4f7fc;--text-primary: #141413;--secondary-brand: #141413;--text-hover: #0c3b8d;--text-white: #ffffff;--text-link: #0645b1;--text-press: #082f75;--success-light: #f0f8f1;--background-light-blue: #f4f7fc;--background-white: #ffffff;--premium-dark: #877440;--premium-light: #f9f6ed;--stroke-white: #ffffff;--inactive-content: #b1b1ba;--annotate-light: #a35dff;--annotate-dark: #824acc;--grid: #eef2f9;--inactive-stroke: #ebebee;--shadow: rgba(34, 34, 51, 0.25);--text-inactive: #6d6d7d;--text-error: #b60000;--stroke-error: #b60000;--background-error: #fff2f2;--background-black: #141413;--icon-default: #141413;--icon-blue: #0645b1;--background-grey: #dddde2;--icon-grey: #b1b1ba;--text-focus: #082f75;--brand-colors-neutral-black: #141413;--brand-colors-neutral-900: #535366;--brand-colors-neutral-800: #6d6d7d;--brand-colors-neutral-700: #91919e;--brand-colors-neutral-600: 
#b1b1ba;--brand-colors-neutral-500: #c8c8cf;--brand-colors-neutral-400: #dddde2;--brand-colors-neutral-300: #ebebee;--brand-colors-neutral-200: #f8f8fb;--brand-colors-neutral-100: #fafafa;--brand-colors-neutral-white: #ffffff;--brand-colors-blue-900: #043059;--brand-colors-blue-800: #082f75;--brand-colors-blue-700: #0c3b8d;--brand-colors-blue-600: #0645b1;--brand-colors-blue-500: #386ac1;--brand-colors-blue-400: #cddaef;--brand-colors-blue-300: #e6ecf7;--brand-colors-blue-200: #eef2f9;--brand-colors-blue-100: #f4f7fc;--brand-colors-gold-500: #877440;--brand-colors-gold-400: #e9e3d4;--brand-colors-gold-300: #f2efe8;--brand-colors-gold-200: #f9f6ed;--brand-colors-gold-100: #f9f7f4;--brand-colors-error-900: #920000;--brand-colors-error-500: #b60000;--brand-colors-success-900: #035c0f;--brand-colors-green: #ccffd4;--brand-colors-turquoise: #ccf7ff;--brand-colors-yellow: #f7ffcc;--brand-colors-peach: #ffd4cc;--brand-colors-violet: #f7ccff;--brand-colors-error-100: #fff2f2;--brand-colors-success-500: #05b01c;--brand-colors-success-100: #f0f8f1;--text-secondary: #535366;--icon-white: #ffffff;--background-beige-darker: #f2efe8;--icon-dark-grey: #535366;--type-font-family-sans-serif: DM Sans;--type-font-family-serif: Gupter;--type-font-family-mono: IBM Plex Mono;--type-weights-300: 300;--type-weights-400: 400;--type-weights-500: 500;--type-weights-700: 700;--type-sizes-12: 12px;--type-sizes-14: 14px;--type-sizes-16: 16px;--type-sizes-18: 18px;--type-sizes-20: 20px;--type-sizes-22: 22px;--type-sizes-24: 24px;--type-sizes-28: 28px;--type-sizes-30: 30px;--type-sizes-32: 32px;--type-sizes-40: 40px;--type-sizes-42: 42px;--type-sizes-48-2: 48px;--type-line-heights-16: 16px;--type-line-heights-20: 20px;--type-line-heights-23: 23px;--type-line-heights-24: 24px;--type-line-heights-25: 25px;--type-line-heights-26: 26px;--type-line-heights-29: 29px;--type-line-heights-30: 30px;--type-line-heights-32: 32px;--type-line-heights-34: 34px;--type-line-heights-35: 
35px;--type-line-heights-36: 36px;--type-line-heights-38: 38px;--type-line-heights-40: 40px;--type-line-heights-46: 46px;--type-line-heights-48: 48px;--type-line-heights-52: 52px;--type-line-heights-58: 58px;--type-line-heights-68: 68px;--type-line-heights-74: 74px;--type-line-heights-82: 82px;--type-paragraph-spacings-0: 0px;--type-paragraph-spacings-4: 4px;--type-paragraph-spacings-8: 8px;--type-paragraph-spacings-16: 16px;--type-sans-serif-xl-font-weight: 400;--type-sans-serif-xl-size: 32px;--type-sans-serif-xl-line-height: 46px;--type-sans-serif-xl-paragraph-spacing: 16px;--type-sans-serif-lg-font-weight: 400;--type-sans-serif-lg-size: 30px;--type-sans-serif-lg-line-height: 36px;--type-sans-serif-lg-paragraph-spacing: 16px;--type-sans-serif-md-font-weight: 400;--type-sans-serif-md-line-height: 30px;--type-sans-serif-md-paragraph-spacing: 16px;--type-sans-serif-md-size: 24px;--type-sans-serif-xs-font-weight: 700;--type-sans-serif-xs-line-height: 24px;--type-sans-serif-xs-paragraph-spacing: 0px;--type-sans-serif-xs-size: 18px;--type-sans-serif-sm-font-weight: 400;--type-sans-serif-sm-line-height: 32px;--type-sans-serif-sm-paragraph-spacing: 16px;--type-sans-serif-sm-size: 20px;--type-body-xl-font-weight: 400;--type-body-xl-size: 24px;--type-body-xl-line-height: 36px;--type-body-xl-paragraph-spacing: 0px;--type-body-sm-font-weight: 400;--type-body-sm-size: 14px;--type-body-sm-line-height: 20px;--type-body-sm-paragraph-spacing: 8px;--type-body-xs-font-weight: 400;--type-body-xs-size: 12px;--type-body-xs-line-height: 16px;--type-body-xs-paragraph-spacing: 0px;--type-body-md-font-weight: 400;--type-body-md-size: 16px;--type-body-md-line-height: 20px;--type-body-md-paragraph-spacing: 4px;--type-body-lg-font-weight: 400;--type-body-lg-size: 20px;--type-body-lg-line-height: 26px;--type-body-lg-paragraph-spacing: 16px;--type-body-lg-medium-font-weight: 500;--type-body-lg-medium-size: 20px;--type-body-lg-medium-line-height: 32px;--type-body-lg-medium-paragraph-spacing: 
16px;--type-body-md-medium-font-weight: 500;--type-body-md-medium-size: 16px;--type-body-md-medium-line-height: 20px;--type-body-md-medium-paragraph-spacing: 4px;--type-body-sm-bold-font-weight: 700;--type-body-sm-bold-size: 14px;--type-body-sm-bold-line-height: 20px;--type-body-sm-bold-paragraph-spacing: 8px;--type-body-sm-medium-font-weight: 500;--type-body-sm-medium-size: 14px;--type-body-sm-medium-line-height: 20px;--type-body-sm-medium-paragraph-spacing: 8px;--type-serif-md-font-weight: 400;--type-serif-md-size: 32px;--type-serif-md-paragraph-spacing: 0px;--type-serif-md-line-height: 40px;--type-serif-sm-font-weight: 400;--type-serif-sm-size: 24px;--type-serif-sm-paragraph-spacing: 0px;--type-serif-sm-line-height: 26px;--type-serif-lg-font-weight: 400;--type-serif-lg-size: 48px;--type-serif-lg-paragraph-spacing: 0px;--type-serif-lg-line-height: 52px;--type-serif-xs-font-weight: 400;--type-serif-xs-size: 18px;--type-serif-xs-line-height: 24px;--type-serif-xs-paragraph-spacing: 0px;--type-serif-xl-font-weight: 400;--type-serif-xl-size: 48px;--type-serif-xl-paragraph-spacing: 0px;--type-serif-xl-line-height: 58px;--type-mono-md-font-weight: 400;--type-mono-md-size: 22px;--type-mono-md-line-height: 24px;--type-mono-md-paragraph-spacing: 0px;--type-mono-lg-font-weight: 400;--type-mono-lg-size: 40px;--type-mono-lg-line-height: 40px;--type-mono-lg-paragraph-spacing: 0px;--type-mono-sm-font-weight: 400;--type-mono-sm-size: 14px;--type-mono-sm-line-height: 24px;--type-mono-sm-paragraph-spacing: 0px;--spacing-xs-4: 4px;--spacing-xs-8: 8px;--spacing-xs-16: 16px;--spacing-sm-24: 24px;--spacing-sm-32: 32px;--spacing-md-40: 40px;--spacing-md-48: 48px;--spacing-lg-64: 64px;--spacing-lg-80: 80px;--spacing-xlg-104: 104px;--spacing-xlg-152: 152px;--spacing-xs-12: 12px;--spacing-page-section: 80px;--spacing-card-list-spacing: 48px;--spacing-text-section-spacing: 64px;--spacing-md-xs-headings: 40px;--corner-radius-radius-lg: 16px;--corner-radius-radius-sm: 
4px;--corner-radius-radius-md: 8px;--corner-radius-radius-round: 104px}}@media(min-width: 568px)and (max-width: 1279px){:root{--token-mode: Rebrand;--dropshadow: 0 2px 4px 0 #22223340;--primary-brand: #0645b1;--error-dark: #b60000;--success-dark: #05b01c;--inactive-fill: #ebebee;--hover: #0c3b8d;--pressed: #082f75;--button-primary-fill-inactive: #ebebee;--button-primary-fill: #0645b1;--button-primary-text: #ffffff;--button-primary-fill-hover: #0c3b8d;--button-primary-fill-press: #082f75;--button-primary-icon: #ffffff;--button-primary-fill-inverse: #ffffff;--button-primary-text-inverse: #082f75;--button-primary-icon-inverse: #0645b1;--button-primary-fill-inverse-hover: #cddaef;--button-primary-stroke-inverse-pressed: #0645b1;--button-secondary-stroke-inactive: #b1b1ba;--button-secondary-fill: #eef2f9;--button-secondary-text: #082f75;--button-secondary-fill-press: #cddaef;--button-secondary-fill-inactive: #ebebee;--button-secondary-stroke: #cddaef;--button-secondary-stroke-hover: #386ac1;--button-secondary-stroke-press: #0645b1;--button-secondary-text-inactive: #b1b1ba;--button-secondary-icon: #082f75;--button-secondary-fill-hover: #e6ecf7;--button-secondary-stroke-inverse: #ffffff;--button-secondary-fill-inverse: rgba(255, 255, 255, 0);--button-secondary-icon-inverse: #ffffff;--button-secondary-icon-hover: #082f75;--button-secondary-icon-press: #082f75;--button-secondary-text-inverse: #ffffff;--button-secondary-text-hover: #082f75;--button-secondary-text-press: #082f75;--button-secondary-fill-inverse-hover: #043059;--button-xs-stroke: #141413;--button-xs-stroke-hover: #0c3b8d;--button-xs-stroke-press: #082f75;--button-xs-stroke-inactive: #ebebee;--button-xs-text: #141413;--button-xs-text-hover: #0c3b8d;--button-xs-text-press: #082f75;--button-xs-text-inactive: #91919e;--button-xs-icon: #141413;--button-xs-icon-hover: #0c3b8d;--button-xs-icon-press: #082f75;--button-xs-icon-inactive: #91919e;--button-xs-fill: #ffffff;--button-xs-fill-hover: 
#f4f7fc;--button-xs-fill-press: #eef2f9;--buttons-button-text-inactive: #91919e;--buttons-button-focus: #0645b1;--buttons-button-icon-inactive: #91919e;--buttons-small-buttons-corner-radius: 16px;--buttons-small-buttons-l-r-padding: 20px;--buttons-small-buttons-height: 48px;--buttons-small-buttons-gap: 8px;--buttons-small-buttons-icon-only-width: 48px;--buttons-small-buttons-icon-size: 20px;--buttons-small-buttons-stroke-default: 1px;--buttons-small-buttons-stroke-thick: 2px;--buttons-large-buttons-l-r-padding: 32px;--buttons-large-buttons-height: 64px;--buttons-large-buttons-icon-only-width: 64px;--buttons-large-buttons-icon-size: 20px;--buttons-large-buttons-gap: 8px;--buttons-large-buttons-corner-radius: 16px;--buttons-large-buttons-stroke-default: 1px;--buttons-large-buttons-stroke-thick: 2px;--buttons-extra-small-buttons-l-r-padding: 8px;--buttons-extra-small-buttons-height: 32px;--buttons-extra-small-buttons-icon-size: 16px;--buttons-extra-small-buttons-gap: 4px;--buttons-extra-small-buttons-corner-radius: 8px;--buttons-stroke-default: 1px;--buttons-stroke-thick: 2px;--background-beige: #f9f7f4;--error-light: #fff2f2;--text-placeholder: #6d6d7d;--stroke-dark: #141413;--stroke-light: #dddde2;--stroke-medium: #535366;--accent-green: #ccffd4;--accent-turquoise: #ccf7ff;--accent-yellow: #f7ffcc;--accent-peach: #ffd4cc;--accent-violet: #f7ccff;--accent-purple: #f4f7fc;--text-primary: #141413;--secondary-brand: #141413;--text-hover: #0c3b8d;--text-white: #ffffff;--text-link: #0645b1;--text-press: #082f75;--success-light: #f0f8f1;--background-light-blue: #f4f7fc;--background-white: #ffffff;--premium-dark: #877440;--premium-light: #f9f6ed;--stroke-white: #ffffff;--inactive-content: #b1b1ba;--annotate-light: #a35dff;--annotate-dark: #824acc;--grid: #eef2f9;--inactive-stroke: #ebebee;--shadow: rgba(34, 34, 51, 0.25);--text-inactive: #6d6d7d;--text-error: #b60000;--stroke-error: #b60000;--background-error: #fff2f2;--background-black: #141413;--icon-default: 
#141413;--icon-blue: #0645b1;--background-grey: #dddde2;--icon-grey: #b1b1ba;--text-focus: #082f75;--brand-colors-neutral-black: #141413;--brand-colors-neutral-900: #535366;--brand-colors-neutral-800: #6d6d7d;--brand-colors-neutral-700: #91919e;--brand-colors-neutral-600: #b1b1ba;--brand-colors-neutral-500: #c8c8cf;--brand-colors-neutral-400: #dddde2;--brand-colors-neutral-300: #ebebee;--brand-colors-neutral-200: #f8f8fb;--brand-colors-neutral-100: #fafafa;--brand-colors-neutral-white: #ffffff;--brand-colors-blue-900: #043059;--brand-colors-blue-800: #082f75;--brand-colors-blue-700: #0c3b8d;--brand-colors-blue-600: #0645b1;--brand-colors-blue-500: #386ac1;--brand-colors-blue-400: #cddaef;--brand-colors-blue-300: #e6ecf7;--brand-colors-blue-200: #eef2f9;--brand-colors-blue-100: #f4f7fc;--brand-colors-gold-500: #877440;--brand-colors-gold-400: #e9e3d4;--brand-colors-gold-300: #f2efe8;--brand-colors-gold-200: #f9f6ed;--brand-colors-gold-100: #f9f7f4;--brand-colors-error-900: #920000;--brand-colors-error-500: #b60000;--brand-colors-success-900: #035c0f;--brand-colors-green: #ccffd4;--brand-colors-turquoise: #ccf7ff;--brand-colors-yellow: #f7ffcc;--brand-colors-peach: #ffd4cc;--brand-colors-violet: #f7ccff;--brand-colors-error-100: #fff2f2;--brand-colors-success-500: #05b01c;--brand-colors-success-100: #f0f8f1;--text-secondary: #535366;--icon-white: #ffffff;--background-beige-darker: #f2efe8;--icon-dark-grey: #535366;--type-font-family-sans-serif: DM Sans;--type-font-family-serif: Gupter;--type-font-family-mono: IBM Plex Mono;--type-weights-300: 300;--type-weights-400: 400;--type-weights-500: 500;--type-weights-700: 700;--type-sizes-12: 12px;--type-sizes-14: 14px;--type-sizes-16: 16px;--type-sizes-18: 18px;--type-sizes-20: 20px;--type-sizes-22: 22px;--type-sizes-24: 24px;--type-sizes-28: 28px;--type-sizes-30: 30px;--type-sizes-32: 32px;--type-sizes-40: 40px;--type-sizes-42: 42px;--type-sizes-48-2: 48px;--type-line-heights-16: 16px;--type-line-heights-20: 
20px;--type-line-heights-23: 23px;--type-line-heights-24: 24px;--type-line-heights-25: 25px;--type-line-heights-26: 26px;--type-line-heights-29: 29px;--type-line-heights-30: 30px;--type-line-heights-32: 32px;--type-line-heights-34: 34px;--type-line-heights-35: 35px;--type-line-heights-36: 36px;--type-line-heights-38: 38px;--type-line-heights-40: 40px;--type-line-heights-46: 46px;--type-line-heights-48: 48px;--type-line-heights-52: 52px;--type-line-heights-58: 58px;--type-line-heights-68: 68px;--type-line-heights-74: 74px;--type-line-heights-82: 82px;--type-paragraph-spacings-0: 0px;--type-paragraph-spacings-4: 4px;--type-paragraph-spacings-8: 8px;--type-paragraph-spacings-16: 16px;--type-sans-serif-xl-font-weight: 400;--type-sans-serif-xl-size: 42px;--type-sans-serif-xl-line-height: 46px;--type-sans-serif-xl-paragraph-spacing: 16px;--type-sans-serif-lg-font-weight: 400;--type-sans-serif-lg-size: 32px;--type-sans-serif-lg-line-height: 36px;--type-sans-serif-lg-paragraph-spacing: 16px;--type-sans-serif-md-font-weight: 400;--type-sans-serif-md-line-height: 34px;--type-sans-serif-md-paragraph-spacing: 16px;--type-sans-serif-md-size: 28px;--type-sans-serif-xs-font-weight: 700;--type-sans-serif-xs-line-height: 25px;--type-sans-serif-xs-paragraph-spacing: 0px;--type-sans-serif-xs-size: 20px;--type-sans-serif-sm-font-weight: 400;--type-sans-serif-sm-line-height: 30px;--type-sans-serif-sm-paragraph-spacing: 16px;--type-sans-serif-sm-size: 24px;--type-body-xl-font-weight: 400;--type-body-xl-size: 24px;--type-body-xl-line-height: 36px;--type-body-xl-paragraph-spacing: 0px;--type-body-sm-font-weight: 400;--type-body-sm-size: 14px;--type-body-sm-line-height: 20px;--type-body-sm-paragraph-spacing: 8px;--type-body-xs-font-weight: 400;--type-body-xs-size: 12px;--type-body-xs-line-height: 16px;--type-body-xs-paragraph-spacing: 0px;--type-body-md-font-weight: 400;--type-body-md-size: 16px;--type-body-md-line-height: 20px;--type-body-md-paragraph-spacing: 
4px;--type-body-lg-font-weight: 400;--type-body-lg-size: 20px;--type-body-lg-line-height: 26px;--type-body-lg-paragraph-spacing: 16px;--type-body-lg-medium-font-weight: 500;--type-body-lg-medium-size: 20px;--type-body-lg-medium-line-height: 32px;--type-body-lg-medium-paragraph-spacing: 16px;--type-body-md-medium-font-weight: 500;--type-body-md-medium-size: 16px;--type-body-md-medium-line-height: 20px;--type-body-md-medium-paragraph-spacing: 4px;--type-body-sm-bold-font-weight: 700;--type-body-sm-bold-size: 14px;--type-body-sm-bold-line-height: 20px;--type-body-sm-bold-paragraph-spacing: 8px;--type-body-sm-medium-font-weight: 500;--type-body-sm-medium-size: 14px;--type-body-sm-medium-line-height: 20px;--type-body-sm-medium-paragraph-spacing: 8px;--type-serif-md-font-weight: 400;--type-serif-md-size: 40px;--type-serif-md-paragraph-spacing: 0px;--type-serif-md-line-height: 48px;--type-serif-sm-font-weight: 400;--type-serif-sm-size: 28px;--type-serif-sm-paragraph-spacing: 0px;--type-serif-sm-line-height: 32px;--type-serif-lg-font-weight: 400;--type-serif-lg-size: 58px;--type-serif-lg-paragraph-spacing: 0px;--type-serif-lg-line-height: 68px;--type-serif-xs-font-weight: 400;--type-serif-xs-size: 18px;--type-serif-xs-line-height: 24px;--type-serif-xs-paragraph-spacing: 0px;--type-serif-xl-font-weight: 400;--type-serif-xl-size: 74px;--type-serif-xl-paragraph-spacing: 0px;--type-serif-xl-line-height: 82px;--type-mono-md-font-weight: 400;--type-mono-md-size: 22px;--type-mono-md-line-height: 24px;--type-mono-md-paragraph-spacing: 0px;--type-mono-lg-font-weight: 400;--type-mono-lg-size: 40px;--type-mono-lg-line-height: 40px;--type-mono-lg-paragraph-spacing: 0px;--type-mono-sm-font-weight: 400;--type-mono-sm-size: 14px;--type-mono-sm-line-height: 24px;--type-mono-sm-paragraph-spacing: 0px;--spacing-xs-4: 4px;--spacing-xs-8: 8px;--spacing-xs-16: 16px;--spacing-sm-24: 24px;--spacing-sm-32: 32px;--spacing-md-40: 40px;--spacing-md-48: 48px;--spacing-lg-64: 64px;--spacing-lg-80: 
80px;--spacing-xlg-104: 104px;--spacing-xlg-152: 152px;--spacing-xs-12: 12px;--spacing-page-section: 104px;--spacing-card-list-spacing: 48px;--spacing-text-section-spacing: 80px;--spacing-md-xs-headings: 40px;--corner-radius-radius-lg: 16px;--corner-radius-radius-sm: 4px;--corner-radius-radius-md: 8px;--corner-radius-radius-round: 104px}}@media(min-width: 1280px){:root{--token-mode: Rebrand;--dropshadow: 0 2px 4px 0 #22223340;--primary-brand: #0645b1;--error-dark: #b60000;--success-dark: #05b01c;--inactive-fill: #ebebee;--hover: #0c3b8d;--pressed: #082f75;--button-primary-fill-inactive: #ebebee;--button-primary-fill: #0645b1;--button-primary-text: #ffffff;--button-primary-fill-hover: #0c3b8d;--button-primary-fill-press: #082f75;--button-primary-icon: #ffffff;--button-primary-fill-inverse: #ffffff;--button-primary-text-inverse: #082f75;--button-primary-icon-inverse: #0645b1;--button-primary-fill-inverse-hover: #cddaef;--button-primary-stroke-inverse-pressed: #0645b1;--button-secondary-stroke-inactive: #b1b1ba;--button-secondary-fill: #eef2f9;--button-secondary-text: #082f75;--button-secondary-fill-press: #cddaef;--button-secondary-fill-inactive: #ebebee;--button-secondary-stroke: #cddaef;--button-secondary-stroke-hover: #386ac1;--button-secondary-stroke-press: #0645b1;--button-secondary-text-inactive: #b1b1ba;--button-secondary-icon: #082f75;--button-secondary-fill-hover: #e6ecf7;--button-secondary-stroke-inverse: #ffffff;--button-secondary-fill-inverse: rgba(255, 255, 255, 0);--button-secondary-icon-inverse: #ffffff;--button-secondary-icon-hover: #082f75;--button-secondary-icon-press: #082f75;--button-secondary-text-inverse: #ffffff;--button-secondary-text-hover: #082f75;--button-secondary-text-press: #082f75;--button-secondary-fill-inverse-hover: #043059;--button-xs-stroke: #141413;--button-xs-stroke-hover: #0c3b8d;--button-xs-stroke-press: #082f75;--button-xs-stroke-inactive: #ebebee;--button-xs-text: #141413;--button-xs-text-hover: 
#0c3b8d;--button-xs-text-press: #082f75;--button-xs-text-inactive: #91919e;--button-xs-icon: #141413;--button-xs-icon-hover: #0c3b8d;--button-xs-icon-press: #082f75;--button-xs-icon-inactive: #91919e;--button-xs-fill: #ffffff;--button-xs-fill-hover: #f4f7fc;--button-xs-fill-press: #eef2f9;--buttons-button-text-inactive: #91919e;--buttons-button-focus: #0645b1;--buttons-button-icon-inactive: #91919e;--buttons-small-buttons-corner-radius: 16px;--buttons-small-buttons-l-r-padding: 20px;--buttons-small-buttons-height: 48px;--buttons-small-buttons-gap: 8px;--buttons-small-buttons-icon-only-width: 48px;--buttons-small-buttons-icon-size: 20px;--buttons-small-buttons-stroke-default: 1px;--buttons-small-buttons-stroke-thick: 2px;--buttons-large-buttons-l-r-padding: 32px;--buttons-large-buttons-height: 64px;--buttons-large-buttons-icon-only-width: 64px;--buttons-large-buttons-icon-size: 20px;--buttons-large-buttons-gap: 8px;--buttons-large-buttons-corner-radius: 16px;--buttons-large-buttons-stroke-default: 1px;--buttons-large-buttons-stroke-thick: 2px;--buttons-extra-small-buttons-l-r-padding: 8px;--buttons-extra-small-buttons-height: 32px;--buttons-extra-small-buttons-icon-size: 16px;--buttons-extra-small-buttons-gap: 4px;--buttons-extra-small-buttons-corner-radius: 8px;--buttons-stroke-default: 1px;--buttons-stroke-thick: 2px;--background-beige: #f9f7f4;--error-light: #fff2f2;--text-placeholder: #6d6d7d;--stroke-dark: #141413;--stroke-light: #dddde2;--stroke-medium: #535366;--accent-green: #ccffd4;--accent-turquoise: #ccf7ff;--accent-yellow: #f7ffcc;--accent-peach: #ffd4cc;--accent-violet: #f7ccff;--accent-purple: #f4f7fc;--text-primary: #141413;--secondary-brand: #141413;--text-hover: #0c3b8d;--text-white: #ffffff;--text-link: #0645b1;--text-press: #082f75;--success-light: #f0f8f1;--background-light-blue: #f4f7fc;--background-white: #ffffff;--premium-dark: #877440;--premium-light: #f9f6ed;--stroke-white: #ffffff;--inactive-content: #b1b1ba;--annotate-light: 
#a35dff;--annotate-dark: #824acc;--grid: #eef2f9;--inactive-stroke: #ebebee;--shadow: rgba(34, 34, 51, 0.25);--text-inactive: #6d6d7d;--text-error: #b60000;--stroke-error: #b60000;--background-error: #fff2f2;--background-black: #141413;--icon-default: #141413;--icon-blue: #0645b1;--background-grey: #dddde2;--icon-grey: #b1b1ba;--text-focus: #082f75;--brand-colors-neutral-black: #141413;--brand-colors-neutral-900: #535366;--brand-colors-neutral-800: #6d6d7d;--brand-colors-neutral-700: #91919e;--brand-colors-neutral-600: #b1b1ba;--brand-colors-neutral-500: #c8c8cf;--brand-colors-neutral-400: #dddde2;--brand-colors-neutral-300: #ebebee;--brand-colors-neutral-200: #f8f8fb;--brand-colors-neutral-100: #fafafa;--brand-colors-neutral-white: #ffffff;--brand-colors-blue-900: #043059;--brand-colors-blue-800: #082f75;--brand-colors-blue-700: #0c3b8d;--brand-colors-blue-600: #0645b1;--brand-colors-blue-500: #386ac1;--brand-colors-blue-400: #cddaef;--brand-colors-blue-300: #e6ecf7;--brand-colors-blue-200: #eef2f9;--brand-colors-blue-100: #f4f7fc;--brand-colors-gold-500: #877440;--brand-colors-gold-400: #e9e3d4;--brand-colors-gold-300: #f2efe8;--brand-colors-gold-200: #f9f6ed;--brand-colors-gold-100: #f9f7f4;--brand-colors-error-900: #920000;--brand-colors-error-500: #b60000;--brand-colors-success-900: #035c0f;--brand-colors-green: #ccffd4;--brand-colors-turquoise: #ccf7ff;--brand-colors-yellow: #f7ffcc;--brand-colors-peach: #ffd4cc;--brand-colors-violet: #f7ccff;--brand-colors-error-100: #fff2f2;--brand-colors-success-500: #05b01c;--brand-colors-success-100: #f0f8f1;--text-secondary: #535366;--icon-white: #ffffff;--background-beige-darker: #f2efe8;--icon-dark-grey: #535366;--type-font-family-sans-serif: DM Sans;--type-font-family-serif: Gupter;--type-font-family-mono: IBM Plex Mono;--type-weights-300: 300;--type-weights-400: 400;--type-weights-500: 500;--type-weights-700: 700;--type-sizes-12: 12px;--type-sizes-14: 14px;--type-sizes-16: 16px;--type-sizes-18: 18px;--type-sizes-20: 
20px;--type-sizes-22: 22px;--type-sizes-24: 24px;--type-sizes-28: 28px;--type-sizes-30: 30px;--type-sizes-32: 32px;--type-sizes-40: 40px;--type-sizes-42: 42px;--type-sizes-48-2: 48px;--type-line-heights-16: 16px;--type-line-heights-20: 20px;--type-line-heights-23: 23px;--type-line-heights-24: 24px;--type-line-heights-25: 25px;--type-line-heights-26: 26px;--type-line-heights-29: 29px;--type-line-heights-30: 30px;--type-line-heights-32: 32px;--type-line-heights-34: 34px;--type-line-heights-35: 35px;--type-line-heights-36: 36px;--type-line-heights-38: 38px;--type-line-heights-40: 40px;--type-line-heights-46: 46px;--type-line-heights-48: 48px;--type-line-heights-52: 52px;--type-line-heights-58: 58px;--type-line-heights-68: 68px;--type-line-heights-74: 74px;--type-line-heights-82: 82px;--type-paragraph-spacings-0: 0px;--type-paragraph-spacings-4: 4px;--type-paragraph-spacings-8: 8px;--type-paragraph-spacings-16: 16px;--type-sans-serif-xl-font-weight: 400;--type-sans-serif-xl-size: 42px;--type-sans-serif-xl-line-height: 46px;--type-sans-serif-xl-paragraph-spacing: 16px;--type-sans-serif-lg-font-weight: 400;--type-sans-serif-lg-size: 32px;--type-sans-serif-lg-line-height: 38px;--type-sans-serif-lg-paragraph-spacing: 16px;--type-sans-serif-md-font-weight: 400;--type-sans-serif-md-line-height: 34px;--type-sans-serif-md-paragraph-spacing: 16px;--type-sans-serif-md-size: 28px;--type-sans-serif-xs-font-weight: 700;--type-sans-serif-xs-line-height: 25px;--type-sans-serif-xs-paragraph-spacing: 0px;--type-sans-serif-xs-size: 20px;--type-sans-serif-sm-font-weight: 400;--type-sans-serif-sm-line-height: 30px;--type-sans-serif-sm-paragraph-spacing: 16px;--type-sans-serif-sm-size: 24px;--type-body-xl-font-weight: 400;--type-body-xl-size: 24px;--type-body-xl-line-height: 36px;--type-body-xl-paragraph-spacing: 0px;--type-body-sm-font-weight: 400;--type-body-sm-size: 14px;--type-body-sm-line-height: 20px;--type-body-sm-paragraph-spacing: 8px;--type-body-xs-font-weight: 
400;--type-body-xs-size: 12px;--type-body-xs-line-height: 16px;--type-body-xs-paragraph-spacing: 0px;--type-body-md-font-weight: 400;--type-body-md-size: 16px;--type-body-md-line-height: 20px;--type-body-md-paragraph-spacing: 4px;--type-body-lg-font-weight: 400;--type-body-lg-size: 20px;--type-body-lg-line-height: 26px;--type-body-lg-paragraph-spacing: 16px;--type-body-lg-medium-font-weight: 500;--type-body-lg-medium-size: 20px;--type-body-lg-medium-line-height: 32px;--type-body-lg-medium-paragraph-spacing: 16px;--type-body-md-medium-font-weight: 500;--type-body-md-medium-size: 16px;--type-body-md-medium-line-height: 20px;--type-body-md-medium-paragraph-spacing: 4px;--type-body-sm-bold-font-weight: 700;--type-body-sm-bold-size: 14px;--type-body-sm-bold-line-height: 20px;--type-body-sm-bold-paragraph-spacing: 8px;--type-body-sm-medium-font-weight: 500;--type-body-sm-medium-size: 14px;--type-body-sm-medium-line-height: 20px;--type-body-sm-medium-paragraph-spacing: 8px;--type-serif-md-font-weight: 400;--type-serif-md-size: 40px;--type-serif-md-paragraph-spacing: 0px;--type-serif-md-line-height: 48px;--type-serif-sm-font-weight: 400;--type-serif-sm-size: 28px;--type-serif-sm-paragraph-spacing: 0px;--type-serif-sm-line-height: 32px;--type-serif-lg-font-weight: 400;--type-serif-lg-size: 58px;--type-serif-lg-paragraph-spacing: 0px;--type-serif-lg-line-height: 68px;--type-serif-xs-font-weight: 400;--type-serif-xs-size: 18px;--type-serif-xs-line-height: 24px;--type-serif-xs-paragraph-spacing: 0px;--type-serif-xl-font-weight: 400;--type-serif-xl-size: 74px;--type-serif-xl-paragraph-spacing: 0px;--type-serif-xl-line-height: 82px;--type-mono-md-font-weight: 400;--type-mono-md-size: 22px;--type-mono-md-line-height: 24px;--type-mono-md-paragraph-spacing: 0px;--type-mono-lg-font-weight: 400;--type-mono-lg-size: 40px;--type-mono-lg-line-height: 40px;--type-mono-lg-paragraph-spacing: 0px;--type-mono-sm-font-weight: 400;--type-mono-sm-size: 14px;--type-mono-sm-line-height: 
24px;--type-mono-sm-paragraph-spacing: 0px;--spacing-xs-4: 4px;--spacing-xs-8: 8px;--spacing-xs-16: 16px;--spacing-sm-24: 24px;--spacing-sm-32: 32px;--spacing-md-40: 40px;--spacing-md-48: 48px;--spacing-lg-64: 64px;--spacing-lg-80: 80px;--spacing-xlg-104: 104px;--spacing-xlg-152: 152px;--spacing-xs-12: 12px;--spacing-page-section: 152px;--spacing-card-list-spacing: 48px;--spacing-text-section-spacing: 80px;--spacing-md-xs-headings: 40px;--corner-radius-radius-lg: 16px;--corner-radius-radius-sm: 4px;--corner-radius-radius-md: 8px;--corner-radius-radius-round: 104px}}</style><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/single_work_page/loswp-f29774c14b4e629cbb4375c919ad1c2b2891ac825d0a410ea6339ae17e481a55.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/body-170d1319f0e354621e81ca17054bb147da2856ec0702fe440a99af314a6338c5.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/button-bfbac2a470372e2f3a6661a65fa7ff0a0fbf7aa32534d9a831d683d2a6f9e01b.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/heading-95367dc03b794f6737f30123738a886cf53b7a65cdef98a922a98591d60063e3.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/text_button-d1941ab08e91e29ee143084c4749da4aaffa350a2ac6eec2306b1d7a352d911a.css" /><link crossorigin="" href="https://fonts.gstatic.com/" rel="preconnect" /><link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,opsz,wght@0,9..40,100..1000;1,9..40,100..1000&family=Gupter:wght@400;500;700&family=IBM+Plex+Mono:wght@300;400&family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20,400,0,0&display=swap" rel="stylesheet" /> </head> <body> <div id='react-modal'></div> <div class="js-upgrade-ie-banner" style="display: none; text-align: center; padding: 8px 0; background-color: #ebe480;"><p style="color: #000; font-size: 12px; margin: 0 0 4px;">Academia.edu no 
longer supports Internet Explorer.</p><p style="color: #000; font-size: 12px; margin: 0;">To browse Academia.edu and the wider internet faster and more securely, please take a few seconds to <a href="https://www.academia.edu/upgrade-browser">upgrade your browser</a>.</p></div><script>/* Show this banner for all versions of IE */ if (!!window.MSInputMethodContext || /(MSIE)/.test(navigator.userAgent)) { document.querySelector('.js-upgrade-ie-banner').style.display = 'block'; }</script> <div class="bootstrap login"><div class="modal fade login-modal" id="login-modal"><div class="login-modal-dialog modal-dialog"><div class="modal-content"><div class="modal-header"><button class="close close" data-dismiss="modal" type="button"><span aria-hidden="true">×</span><span class="sr-only">Close</span></button><h4 class="modal-title text-center"><strong>Log In</strong></h4></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><button class="btn btn-fb btn-lg btn-block btn-v-center-content" id="login-facebook-oauth-button"><svg style="float: left; width: 19px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="facebook-square" class="svg-inline--fa fa-facebook-square fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M400 32H48A48 48 0 0 0 0 80v352a48 48 0 0 0 48 48h137.25V327.69h-63V256h63v-54.64c0-62.15 37-96.48 93.67-96.48 27.14 0 55.52 4.84 55.52 4.84v61h-31.27c-30.81 0-40.42 19.12-40.42 38.73V256h68.78l-11 71.69h-57.78V480H400a48 48 0 0 0 48-48V80a48 48 0 0 0-48-48z"></path></svg><small><strong>Log in</strong> with <strong>Facebook</strong></small></button><br /><button class="btn btn-google btn-lg btn-block btn-v-center-content" id="login-google-oauth-button"><svg style="float: left; width: 22px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="google-plus" class="svg-inline--fa 
fa-google-plus fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M256,8C119.1,8,8,119.1,8,256S119.1,504,256,504,504,392.9,504,256,392.9,8,256,8ZM185.3,380a124,124,0,0,1,0-248c31.3,0,60.1,11,83,32.3l-33.6,32.6c-13.2-12.9-31.3-19.1-49.4-19.1-42.9,0-77.2,35.5-77.2,78.1S142.3,334,185.3,334c32.6,0,64.9-19.1,70.1-53.3H185.3V238.1H302.2a109.2,109.2,0,0,1,1.9,20.7c0,70.8-47.5,121.2-118.8,121.2ZM415.5,273.8v35.5H380V273.8H344.5V238.3H380V202.8h35.5v35.5h35.2v35.5Z"></path></svg><small><strong>Log in</strong> with <strong>Google</strong></small></button><br /><style type="text/css">.sign-in-with-apple-button { width: 100%; height: 52px; border-radius: 3px; border: 1px solid black; cursor: pointer; } .sign-in-with-apple-button > div { margin: 0 auto; /* This centers the Apple-rendered button horizontally */ }</style><script src="https://appleid.cdn-apple.com/appleauth/static/jsapi/appleid/1/en_US/appleid.auth.js" type="text/javascript"></script><div class="sign-in-with-apple-button" data-border="false" data-color="white" id="appleid-signin"><!-- NOTE(review): original had a nameless attribute ="Sign Up with Apple" on this span; the attribute name was lost upstream - confirm what was intended --><span class="u-fs11"></span></div><script>AppleID.auth.init({ clientId: 'edu.academia.applesignon', scope: 'name email', redirectURI: 'https://www.academia.edu/sessions', state: "551f9629cdf8e71462f8eeb321fa59d8b061f02e7b331697ab05e5ebd4aa2cd0", });</script><script>/* Hacky way of checking if on fast loswp */ if (window.loswp == null) { (function() { const Google = window?.Aedu?.Auth?.OauthButton?.Login?.Google; const Facebook = window?.Aedu?.Auth?.OauthButton?.Login?.Facebook; if (Google) { new Google({ el: '#login-google-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } if (Facebook) { new Facebook({ el: '#login-facebook-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } })(); }</script></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><div class="hr-heading login-hr-heading"><span 
class="hr-heading-text">or</span></div></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><form class="js-login-form" action="https://www.academia.edu/sessions" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="u0JLPqa7u9_5LwjB7gnkkvZdnLatjySR2-9_-2n2J5a26x44hAuO1kV4q0NJOzbotcTcAZFKTPb8VVQJngX4hg" autocomplete="off" /><div class="form-group"><label class="control-label" for="login-modal-email-input" style="font-size: 14px;">Email</label><input class="form-control" id="login-modal-email-input" name="login" type="email" /></div><div class="form-group"><label class="control-label" for="login-modal-password-input" style="font-size: 14px;">Password</label><input class="form-control" id="login-modal-password-input" name="password" type="password" /></div><input type="hidden" name="post_login_redirect_url" id="post_login_redirect_url" value="https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit" autocomplete="off" /><div class="checkbox"><label><input type="checkbox" name="remember_me" id="remember_me" value="1" checked="checked" /><small style="font-size: 12px; margin-top: 2px; display: inline-block;">Remember me on this computer</small></label></div><br /><input type="submit" name="commit" value="Log In" class="btn btn-primary btn-block btn-lg js-login-submit" data-disable-with="Log In" /></form><script>typeof window?.Aedu?.recaptchaManagedForm === 'function' && window.Aedu.recaptchaManagedForm( document.querySelector('.js-login-form'), document.querySelector('.js-login-submit') );</script><small style="font-size: 12px;"><br />or <a data-target="#login-modal-reset-password-container" data-toggle="collapse" href="javascript:void(0)">reset password</a></small><div class="collapse" id="login-modal-reset-password-container"><br /><div class="well margin-0x"><form class="js-password-reset-form" action="https://www.academia.edu/reset_password" accept-charset="UTF-8" 
method="post"><input type="hidden" name="authenticity_token" value="1rJY1YLmDPj0s62ixbRsimxaQvAqdrS_INdzKNpdL_bbGw3ToFY58UjkDiBihr7wL8MCRxaz3NgHbVjaLa7w5g" autocomplete="off" /><p>Enter the email address you signed up with and we'll email you a reset link.</p><div class="form-group"><input class="form-control" name="email" type="email" /></div><input class="btn btn-primary btn-block g-recaptcha js-password-reset-submit" data-sitekey="6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj" type="submit" value="Email me a link" /></form></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/collapse-45805421cf446ca5adf7aaa1935b08a3a8d1d9a6cc5d91a62a2a3a00b20b3e6a.js"], function() { /* from javascript_helper.rb */ $("#login-modal-reset-password-container").on("shown.bs.collapse", function() { $(this).find("input[type=email]").focus(); }); }); </script> </div></div></div><div class="modal-footer"><div class="text-center"><small style="font-size: 12px;">Need an account? <a rel="nofollow" href="https://www.academia.edu/signup">Click here to sign up</a></small></div></div></div></div></div></div><script>/* If we are on subdomain or non-bootstrapped page, redirect to login page instead of showing modal */ (function(){ if (typeof $ === 'undefined') return; var host = window.location.hostname; /* NOTE(review): $domain is never defined in this inline script - looks like lost server-side interpolation; confirm the intended value */ if ((host === $domain || host === "www."+$domain) && (typeof $().modal === 'function')) { $("#nav_log_in").click(function(e) { /* Don't follow the link and open the modal */ e.preventDefault(); $("#login-modal").on('shown.bs.modal', function() { $(this).find("#login-modal-email-input").focus() }).modal('show'); }); } })()</script> <div id="fb-root"></div><script>window.fbAsyncInit = function() { FB.init({ appId: "2369844204", version: "v8.0", status: true, cookie: true, xfbml: true }); /* Additional initialization code. */ if (window.InitFacebook) { // facebook.ts already loaded, set it up. 
window.InitFacebook(); } else { /* Set a flag for facebook.ts to find when it loads. */ window.academiaAuthReadyFacebook = true; } };</script> <div id="google-root"></div><script>window.loadGoogle = function() { if (window.InitGoogle) { /* google.ts already loaded, set it up. */ window.InitGoogle("331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"); } else { /* Set a flag for google.ts to use when it loads. */ window.GoogleClientID = "331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"; } };</script> <div class="header--container" id="main-header-container"><div class="header--inner-container header--inner-container-ds2"><div class="header-ds2--left-wrapper"><div class="header-ds2--left-wrapper-inner"><a data-main-header-link-target="logo_home" href="https://www.academia.edu/"><img class="hide-on-desktop-redesign" style="height: 24px; width: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015-A.svg" width="24" height="24" /><img width="145.2" height="18" class="hide-on-mobile-redesign" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015.svg" /></a><div class="header--search-container header--search-container-ds2"><form class="js-SiteSearch-form select2-no-default-pills" action="https://www.academia.edu/search" accept-charset="UTF-8" method="get"><svg style="width: 14px; height: 14px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="search" class="header--search-icon svg-inline--fa fa-search fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M505 442.7L405.3 343c-4.5-4.5-10.6-7-17-7H372c27.6-35.3 44-79.7 44-128C416 93.1 322.9 0 208 0S0 93.1 0 208s93.1 208 208 208c48.3 0 92.7-16.4 128-44v16.3c0 6.4 2.5 12.5 7 17l99.7 99.7c9.4 9.4 24.6 9.4 33.9 0l28.3-28.3c9.4-9.4 9.4-24.6.1-34zM208 336c-70.7 0-128-57.2-128-128 0-70.7 57.2-128 128-128 70.7 0 128 57.2 128 128 0 70.7-57.2 128-128 128z"></path></svg><input 
class="header--search-input header--search-input-ds2 js-SiteSearch-form-input" data-main-header-click-target="search_input" name="q" placeholder="Search" type="text" /></form></div></div></div><nav class="header--nav-buttons header--nav-buttons-ds2 js-main-nav"><button class="ds2-5-button ds2-5-button--secondary js-header-login-url header-button-ds2 header-login-ds2 hide-on-mobile-redesign react-login-modal-opener" data-signup-modal="{"location":"login-button--header"}" rel="nofollow">Log In</button><button class="ds2-5-button ds2-5-button--secondary header-button-ds2 hide-on-mobile-redesign react-login-modal-opener" data-signup-modal="{"location":"signup-button--header"}" rel="nofollow">Sign Up</button><button class="header--hamburger-button header--hamburger-button-ds2 hide-on-desktop-redesign js-header-hamburger-button"><div class="icon-bar"></div><div class="icon-bar" style="margin-top: 4px;"></div><div class="icon-bar" style="margin-top: 4px;"></div></button></nav></div><ul class="header--dropdown-container js-header-dropdown"><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/login" rel="nofollow">Log In</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/signup" rel="nofollow">Sign Up</a></li><li class="header--dropdown-row js-header-dropdown-expand-button"><button class="header--dropdown-button">more<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="caret-down" class="header--dropdown-button-icon svg-inline--fa fa-caret-down fa-w-10" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 320 512"><path fill="currentColor" d="M31.3 192h257.3c17.8 0 26.7 21.5 14.1 34.1L174.1 354.8c-7.8 7.8-20.5 7.8-28.3 0L17.2 226.1C4.6 213.5 13.5 192 31.3 192z"></path></svg></button></li><li><ul class="header--expanded-dropdown-container"><li class="header--dropdown-row"><a class="header--dropdown-link" 
href="https://www.academia.edu/about">About</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/press">Press</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/documents">Papers</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/terms">Terms</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/privacy">Privacy</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/copyright">Copyright</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://www.academia.edu/hiring"><svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="briefcase" class="header--dropdown-row-icon svg-inline--fa fa-briefcase fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M320 336c0 8.84-7.16 16-16 16h-96c-8.84 0-16-7.16-16-16v-48H0v144c0 25.6 22.4 48 48 48h416c25.6 0 48-22.4 48-48V288H320v48zm144-208h-80V80c0-25.6-22.4-48-48-48H176c-25.6 0-48 22.4-48 48v48H48c-25.6 0-48 22.4-48 48v80h512v-80c0-25.6-22.4-48-48-48zm-144 0H192V96h128v32z"></path></svg>We're Hiring!</a></li><li class="header--dropdown-row"><a class="header--dropdown-link" href="https://support.academia.edu/hc/en-us"><svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="question-circle" class="header--dropdown-row-icon svg-inline--fa fa-question-circle fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M504 256c0 136.997-111.043 248-248 248S8 392.997 8 256C8 119.083 119.043 8 256 8s248 111.083 248 248zM262.655 90c-54.497 0-89.255 22.957-116.549 63.758-3.536 5.286-2.353 12.415 2.715 16.258l34.699 26.31c5.205 3.947 12.621 3.008 16.665-2.122 17.864-22.658 30.113-35.797 57.303-35.797 20.429 0 45.698 13.148 
45.698 32.958 0 14.976-12.363 22.667-32.534 33.976C247.128 238.528 216 254.941 216 296v4c0 6.627 5.373 12 12 12h56c6.627 0 12-5.373 12-12v-1.333c0-28.462 83.186-29.647 83.186-106.667 0-58.002-60.165-102-116.531-102zM256 338c-25.365 0-46 20.635-46 46 0 25.364 20.635 46 46 46s46-20.636 46-46c0-25.365-20.635-46-46-46z"></path></svg>Help Center</a></li><li class="header--dropdown-row js-header-dropdown-collapse-button"><button class="header--dropdown-button">less<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="caret-up" class="header--dropdown-button-icon svg-inline--fa fa-caret-up fa-w-10" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 320 512"><path fill="currentColor" d="M288.662 352H31.338c-17.818 0-26.741-21.543-14.142-34.142l128.662-128.662c7.81-7.81 20.474-7.81 28.284 0l128.662 128.662c12.6 12.599 3.676 34.142-14.142 34.142z"></path></svg></button></li></ul></li></ul></div> <script src="//a.academia-assets.com/assets/webpack_bundles/fast_loswp-bundle-e5ca05062a7092a8f6f5af11f70589210af26f6a0030f102b0b21b22451b9d41.js" defer="defer"></script><script>window.loswp = {}; window.loswp.author = 47847299; window.loswp.bulkDownloadFilterCounts = {}; window.loswp.hasDownloadableAttachment = true; window.loswp.hasViewableAttachments = true; /* TODO: just use routes for this */ window.loswp.loginUrl = "https://www.academia.edu/login?post_login_redirect_url=https%3A%2F%2Fwww.academia.edu%2F84283310%2FMRC_Cognition_and_Brain_Sciences_Unit%3Fauto%3Ddownload"; window.loswp.translateUrl = "https://www.academia.edu/login?post_login_redirect_url=https%3A%2F%2Fwww.academia.edu%2F84283310%2FMRC_Cognition_and_Brain_Sciences_Unit%3Fshow_translation%3Dtrue"; window.loswp.previewableAttachments = [{"id":89359807,"identifier":"Attachment_89359807","shouldShowBulkDownload":false}]; window.loswp.shouldDetectTimezone = true; window.loswp.shouldShowBulkDownload = true; window.loswp.showSignupCaptcha = false; window.loswp.willEdgeCache = false; window.loswp.work 
= {"work":{"id":84283310,"created_at":"2022-08-07T12:43:31.351-07:00","from_world_paper_id":211976890,"updated_at":"2025-01-28T12:56:17.618-08:00","_data":{"grobid_abstract":"Speech comprehension is resistant to acoustic distortion in the input, reflecting listeners' ability to adjust perceptual processes to match the speech input. For noise-vocoded sentences, a manipulation that removes spectral detail from speech, listeners' reporting improved from near 0% to 70% correct over 30 sentences (Experiment 1). Learning was enhanced if listeners heard distorted sentences while they knew the identity of the undistorted target (Experiments 2 and 3). Learning was absent when listeners were trained with nonword sentences (Experiments 4 and 5), although the meaning of the training sentences did not affect learning (Experiment 5). Perceptual learning of noise-vocoded speech depends on higher level information, consistent with top-down, lexically driven learning. Similar processes may facilitate comprehension of speech in an unfamiliar accent or following cochlear implantation. Humans are able to understand speech in a variety of situations that dramatically affect the sounds that reach their ears. They can understand talkers with quite different (foreign or regional) accents, who speak at different speeds, in rooms that introduce reverberation, or when the speech is conveyed over low-fidelity devices such as the telephone. The robustness of speech comprehension to many forms of variation and distortion is currently unmatched by computer speech recognition systems and may therefore reflect a unique specialization of the human perceptual system. Experimental work has demonstrated that speech perception remains robust even when challenged with extreme forms of artificial distortion. For example, speech remains understandable when formants are resynthesized as sinusoids , a manipulation that removes most of the natural qualities of the human voice from a speech signal. 
Other manipulations have shown that dramatic alterations to both the temporal and spectral properties of speech do not substantially impair the intelligibility of spoken language when background noise is absent. It is unlikely that there is any single set of acoustic properties or cues that are preserved in all of these different forms of distorted yet still-intelligible speech (cf. . Therefore, robustness in speech perception reflects the multiple acoustic means by which stable elements of speech (such as phonetic features or syllables) are coded in clear speech: This redundancy permits comprehension when any single cue is lost. Robustness in speech comprehension may also derive from the operation of compensatory mechanisms that are recruited when speech becomes difficult to understand: processes of adaptation and perceptual learning are two such mechanisms. The human language system can dynamically adapt to variation in the acoustic realization of speech, tuning the perceptual system so as to optimally process the current speech input. These adaptation processes can take place very rapidly. 
For example, adaptation to natural changes in speech rate (e.g.,","grobid_abstract_attachment_id":"89359807"},"document_type":"paper","pre_hit_view_count_baseline":null,"quality":"high","language":"en","title":"MRC Cognition and Brain Sciences Unit","broadcastable":true,"draft":null,"has_indexable_attachment":true,"indexable":true}}["work"]; window.loswp.workCoauthors = [47847299]; window.loswp.locale = "en"; window.loswp.countryCode = "SG"; window.loswp.cwvAbTestBucket = ""; window.loswp.designVariant = "ds_vanilla"; window.loswp.fullPageMobileSutdModalVariant = "full_page_mobile_sutd_modal"; window.loswp.useOptimizedScribd4genScript = false; window.loginModal = {}; window.loginModal.appleClientId = 'edu.academia.applesignon'; window.userInChina = "false";</script><script defer="" src="https://accounts.google.com/gsi/client"></script><div class="ds-loswp-container"><div class="ds-work-card--grid-container"><div class="ds-work-card--container js-loswp-work-card ds-work-card--no-bottom-spacing"><div class="ds-work-card--cover"><div class="ds-work-cover--wrapper"><div class="ds-work-cover--container"><button class="ds-work-cover--clickable js-swp-download-button" data-signup-modal="{"location":"swp-splash-paper-cover","attachmentId":89359807,"attachmentType":"pdf"}"><img alt="First page of “MRC Cognition and Brain Sciences Unit”" class="ds-work-cover--cover-thumbnail" src="https://0.academia-photos.com/attachment_thumbnails/89359807/mini_magick20220807-1-7t900e.png?1659901540" /><img alt="PDF Icon" class="ds-work-cover--file-icon" src="//a.academia-assets.com/images/single_work_splash/adobe_icon.svg" /><div class="ds-work-cover--hover-container"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">download</span><p>Download Free PDF</p></div><div class="ds-work-cover--ribbon-container">Download Free PDF</div><div class="ds-work-cover--ribbon-triangle"></div></button></div></div></div><div class="ds-work-card--work-information"><h1 
class="ds-work-card--work-title">MRC Cognition and Brain Sciences Unit</h1><div class="ds-work-card--work-authors ds-work-card--detail"><a class="ds-work-card--author js-wsj-grid-card-author ds2-5-body-md ds2-5-body-link" data-author-id="47847299" href="https://independent.academia.edu/IJohnsrude"><img alt="Profile image of Ingrid Johnsrude" class="ds-work-card--author-avatar" src="https://0.academia-photos.com/47847299/14978817/15725171/s65_ingrid.johnsrude.jpg" />Ingrid Johnsrude</a></div><div class="ds-work-card--detail"><div class="ds-work-card--work-metadata"><div class="ds-work-card--work-metadata__stat"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">visibility</span><p class="ds2-5-body-sm" id="work-metadata-view-count">…</p></div><div class="ds-work-card--work-metadata__stat"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">description</span><p class="ds2-5-body-sm">20 pages</p></div><div class="ds-work-card--work-metadata__stat"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">link</span><p class="ds2-5-body-sm">1 file</p></div></div><script>(async () => { const workId = 84283310; const worksViewsPath = "/v0/works/views?subdomain_param=api&work_ids%5B%5D=84283310"; const getWorkViews = async (workId) => { const response = await fetch(worksViewsPath); if (!response.ok) { throw new Error('Failed to load work views'); } const data = await response.json(); return data.views[workId]; }; /* Get the view count for the work - we send this immediately rather than waiting for the DOM to load, so it can be available as soon as possible (but without holding up the backend or other resource requests, because it's a bit expensive and not critical). */ const viewCount = await getWorkViews(workId); const updateViewCount = (viewCount) => { try { const viewCountNumber = parseInt(viewCount, 10); if (viewCountNumber === 0) { /* Remove the whole views element if there are zero views. */
document.getElementById('work-metadata-view-count')?.parentNode?.remove(); return; } const commaizedViewCount = viewCountNumber.toLocaleString(); const viewCountBody = document.getElementById('work-metadata-view-count'); if (!viewCountBody) { throw new Error('Failed to find work views element'); } viewCountBody.textContent = `${commaizedViewCount} views`; } catch (error) { /* Remove the whole views element if there was some issue parsing. */ document.getElementById('work-metadata-view-count')?.parentNode?.remove(); throw new Error(`Failed to parse view count: ${viewCount}`, { cause: error }); } }; /* If the DOM is still loading, wait for it to be ready before updating the view count. */ if (document.readyState === "loading") { document.addEventListener('DOMContentLoaded', () => { updateViewCount(viewCount); }); /* Otherwise, just update it immediately. */ } else { updateViewCount(viewCount); } })();</script></div><p class="ds-work-card--work-abstract ds-work-card--detail ds2-5-body-md">Speech comprehension is resistant to acoustic distortion in the input, reflecting listeners' ability to adjust perceptual processes to match the speech input. For noise-vocoded sentences, a manipulation that removes spectral detail from speech, listeners' reporting improved from near 0% to 70% correct over 30 sentences (Experiment 1). Learning was enhanced if listeners heard distorted sentences while they knew the identity of the undistorted target (Experiments 2 and 3). Learning was absent when listeners were trained with nonword sentences (Experiments 4 and 5), although the meaning of the training sentences did not affect learning (Experiment 5). Perceptual learning of noise-vocoded speech depends on higher level information, consistent with top-down, lexically driven learning. Similar processes may facilitate comprehension of speech in an unfamiliar accent or following cochlear implantation. 
Humans are able to understand speech in a variety of situations that dramatically affect the sounds that reach their ears. They can understand talkers with quite different (foreign or regional) accents, who speak at different speeds, in rooms that introduce reverberation, or when the speech is conveyed over low-fidelity devices such as the telephone. The robustness of speech comprehension to many forms of variation and distortion is currently unmatched by computer speech recognition systems and may therefore reflect a unique specialization of the human perceptual system. Experimental work has demonstrated that speech perception remains robust even when challenged with extreme forms of artificial distortion. For example, speech remains understandable when formants are resynthesized as sinusoids , a manipulation that removes most of the natural qualities of the human voice from a speech signal. Other manipulations have shown that dramatic alterations to both the temporal and spectral properties of speech do not substantially impair the intelligibility of spoken language when background noise is absent. It is unlikely that there is any single set of acoustic properties or cues that are preserved in all of these different forms of distorted yet still-intelligible speech (cf. . Therefore, robustness in speech perception reflects the multiple acoustic means by which stable elements of speech (such as phonetic features or syllables) are coded in clear speech: This redundancy permits comprehension when any single cue is lost. Robustness in speech comprehension may also derive from the operation of compensatory mechanisms that are recruited when speech becomes difficult to understand: processes of adaptation and perceptual learning are two such mechanisms. The human language system can dynamically adapt to variation in the acoustic realization of speech, tuning the perceptual system so as to optimally process the current speech input. 
These adaptation processes can take place very rapidly. For example, adaptation to natural changes in speech rate (e.g.,</p><div class="ds-work-card--button-container"><button class="ds2-5-button js-swp-download-button" data-signup-modal="{"location":"continue-reading-button--work-card","attachmentId":89359807,"attachmentType":"pdf","workUrl":"https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit"}">See full PDF</button><button class="ds2-5-button ds2-5-button--secondary js-swp-download-button" data-signup-modal="{"location":"download-pdf-button--work-card","attachmentId":89359807,"attachmentType":"pdf","workUrl":"https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit"}"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">download</span>Download PDF</button></div><div class="ds-signup-banner-trigger-container"><div class="ds-signup-banner-trigger ds-signup-banner-trigger-control"></div></div><div class="ds-signup-banner ds-signup-banner-control"><div id="ds-signup-banner-close-button"><button class="ds2-5-button ds2-5-button--secondary ds2-5-button--inverse"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">close</span></button></div><div class="ds-signup-banner-ctas"><img src="//a.academia-assets.com/images/academia-logo-capital-white.svg" /><h4 class="ds2-5-heading-serif-sm">Sign up for access to the world's latest research</h4><button class="ds2-5-button ds2-5-button--inverse ds2-5-button--full-width js-swp-download-button" data-signup-modal="{"location":"signup-banner"}">Sign up for free<span class="material-symbols-outlined" style="font-size: 20px" translate="no">arrow_forward</span></button></div><div class="ds-signup-banner-divider"></div><div class="ds-signup-banner-reasons"><div class="ds-signup-banner-reasons-item"><span class="material-symbols-outlined" style="font-size: 24px" translate="no">check</span><span>Get notified about relevant papers</span></div><div 
class="ds-signup-banner-reasons-item"><span class="material-symbols-outlined" style="font-size: 24px" translate="no">check</span><span>Save papers to use in your research</span></div><div class="ds-signup-banner-reasons-item"><span class="material-symbols-outlined" style="font-size: 24px" translate="no">check</span><span>Join the discussion with peers</span></div><div class="ds-signup-banner-reasons-item"><span class="material-symbols-outlined" style="font-size: 24px" translate="no">check</span><span>Track your impact</span></div></div></div><script>(() => { // Set up signup banner show/hide behavior: // 1. If the signup banner trigger (a 242px-high* invisible div underneath the 'See Full PDF' / 'Download PDF' buttons) // is already fully scrolled above the viewport, show the banner by default // 2. If the signup banner trigger is fully visible, show the banner // 3. If the signup banner trigger has even a few pixels scrolled below the viewport, hide the banner // // * 242px is the empirically determined height of the signup banner. It's better to be a bit taller than // necessary than too short, so it's fine that the mobile (small breakpoint) banner is shorter. 
// First check session storage for the signup banner's visibility state const signupBannerHidden = sessionStorage.getItem('ds-signup-banner-hidden'); if (signupBannerHidden === 'true') { return; } const signupBanner = document.querySelector('.ds-signup-banner'); const signupBannerTrigger = document.querySelector('.ds-signup-banner-trigger'); if (!signupBannerTrigger) { window.Sentry.captureMessage("Signup banner trigger not found"); return; } let footerShown = false; window.addEventListener('load', () => { const rect = signupBannerTrigger.getBoundingClientRect(); // If page loaded up already scrolled below the trigger (via scroll restoration), show the banner by default if (rect.bottom < 0) { footerShown = true; signupBanner.classList.add('ds-signup-banner-visible'); } }); // Wait for trigger to fully enter viewport before showing banner (ensures PDF CTAs are never covered by banner) const observer = new IntersectionObserver((entries) => { entries.forEach(entry => { if (entry.isIntersecting && !footerShown) { footerShown = true; signupBanner.classList.add('ds-signup-banner-visible'); } else if (!entry.isIntersecting && footerShown) { if (signupBannerTrigger.getBoundingClientRect().bottom > 0) { footerShown = false; signupBanner.classList.remove('ds-signup-banner-visible'); } } }); }); observer.observe(signupBannerTrigger); // Set up signup banner close button event handler: const signupBannerCloseButton = document.querySelector('#ds-signup-banner-close-button'); signupBannerCloseButton.addEventListener('click', () => { signupBanner.classList.remove('ds-signup-banner-visible'); observer.unobserve(signupBannerTrigger); // Store the signup banner's visibility state in session storage sessionStorage.setItem('ds-signup-banner-hidden', 'true'); }); })();</script></div></div></div><div data-auto_select="false" data-client_id="331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b" data-doc_id="89359807" 
data-landing_url="https://www.academia.edu/84283310/MRC_Cognition_and_Brain_Sciences_Unit" data-login_uri="https://www.academia.edu/registrations/google_one_tap" data-moment_callback="onGoogleOneTapEvent" id="g_id_onload"></div><div class="ds-sticky-ctas--wrapper js-loswp-sticky-ctas hidden"><div class="ds-sticky-ctas--grid-container"><div class="ds-sticky-ctas--container"><button class="ds2-5-button js-swp-download-button" data-signup-modal="{"location":"continue-reading-button--sticky-ctas","attachmentId":89359807,"attachmentType":"pdf","workUrl":null}">See full PDF</button><button class="ds2-5-button ds2-5-button--secondary js-swp-download-button" data-signup-modal="{"location":"download-pdf-button--sticky-ctas","attachmentId":89359807,"attachmentType":"pdf","workUrl":null}"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">download</span>Download PDF</button></div></div></div><div class="ds-below-fold--grid-container"><div class="ds-work--container js-loswp-embedded-document"><div class="attachment_preview" data-attachment="Attachment_89359807" style="display: none"><div class="js-scribd-document-container"><div class="scribd--document-loading js-scribd-document-loader" style="display: block;"><img alt="Loading..." src="//a.academia-assets.com/images/loaders/paper-load.gif" /><p>Loading Preview</p></div></div><div style="text-align: center;"><div class="scribd--no-preview-alert js-preview-unavailable"><p>Sorry, preview is currently unavailable. 
You can download the paper by clicking the button above.</p></div></div></div></div><div class="ds-sidebar--container js-work-sidebar"><div class="ds-related-content--container"><h2 class="ds-related-content--heading">Related papers</h2><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="0" data-entity-id="6540038" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/6540038/Lexical_Information_Drives_Perceptual_Learning_of_Distorted_Speech_Evidence_From_the_Comprehension_of_Noise_Vocoded_Sentences">Lexical Information Drives Perceptual Learning of Distorted Speech: Evidence From the Comprehension of Noise-Vocoded Sentences</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="10477029" href="https://royalholloway.academia.edu/CarolynMcGettigan">Carolyn McGettigan</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Journal of Experimental Psychology-general, 2005</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Lexical Information Drives Perceptual Learning of Distorted Speech: Evidence From the Comprehension of Noise-Vocoded Sentences","attachmentId":33306585,"attachmentType":"pdf","work_url":"https://www.academia.edu/6540038/Lexical_Information_Drives_Perceptual_Learning_of_Distorted_Speech_Evidence_From_the_Comprehension_of_Noise_Vocoded_Sentences","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" 
href="https://www.academia.edu/6540038/Lexical_Information_Drives_Perceptual_Learning_of_Distorted_Speech_Evidence_From_the_Comprehension_of_Noise_Vocoded_Sentences"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="1" data-entity-id="21310762" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/21310762/HYPOTHESIS_AND_THEORY_ARTICLE_Speech_perception_under_adverse_conditions_insights_from_behavioral_computational_and_neuroscience_research_SYSTEMS_NEUROSCIENCE">HYPOTHESIS AND THEORY ARTICLE Speech perception under adverse conditions: insights from behavioral, computational, and neuroscience research SYSTEMS NEUROSCIENCE</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="30028044" href="https://independent.academia.edu/SaraGuediche">Sara Guediche</a><span>, </span><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="37870095" href="https://independent.academia.edu/SheilaBlumstein">Sheila Blumstein</a></div><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"HYPOTHESIS AND THEORY ARTICLE Speech perception under adverse conditions: insights from behavioral, computational, and neuroscience research SYSTEMS NEUROSCIENCE","attachmentId":41810406,"attachmentType":"pdf","work_url":"https://www.academia.edu/21310762/HYPOTHESIS_AND_THEORY_ARTICLE_Speech_perception_under_adverse_conditions_insights_from_behavioral_computational_and_neuroscience_research_SYSTEMS_NEUROSCIENCE","alternativeTracking":true}"><span 
class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/21310762/HYPOTHESIS_AND_THEORY_ARTICLE_Speech_perception_under_adverse_conditions_insights_from_behavioral_computational_and_neuroscience_research_SYSTEMS_NEUROSCIENCE"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="2" data-entity-id="100319111" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/100319111/A_proposed_mechanism_for_rapid_adaptation_to_spectrally_distorted_speech">A proposed mechanism for rapid adaptation to spectrally distorted speech</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="266128807" href="https://independent.academia.edu/EVANSTUARTBALABANBALABAN">EVAN STUART BALABAN BALABAN</a></div><p class="ds-related-work--metadata ds2-5-body-xs">The Journal of the Acoustical Society of America, 2015</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"A proposed mechanism for rapid adaptation to spectrally distorted speech","attachmentId":101175590,"attachmentType":"pdf","work_url":"https://www.academia.edu/100319111/A_proposed_mechanism_for_rapid_adaptation_to_spectrally_distorted_speech","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span 
class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/100319111/A_proposed_mechanism_for_rapid_adaptation_to_spectrally_distorted_speech"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="3" data-entity-id="87194483" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/87194483/Speech_perception_under_adverse_conditions_insights_from_behavioral_computational_and_neuroscience_research">Speech perception under adverse conditions: insights from behavioral, computational, and neuroscience research</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="37870095" href="https://independent.academia.edu/SheilaBlumstein">Sheila Blumstein</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Frontiers in Systems Neuroscience, 2014</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Speech perception under adverse conditions: insights from behavioral, computational, and neuroscience research","attachmentId":91474247,"attachmentType":"pdf","work_url":"https://www.academia.edu/87194483/Speech_perception_under_adverse_conditions_insights_from_behavioral_computational_and_neuroscience_research","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link 
ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/87194483/Speech_perception_under_adverse_conditions_insights_from_behavioral_computational_and_neuroscience_research"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="4" data-entity-id="126476844" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/126476844/On_the_robustness_of_Speech_Perception">On the robustness of Speech Perception</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="46697892" href="https://independent.academia.edu/DiehlRandy">Randy Diehl</a></div><p class="ds-related-work--metadata ds2-5-body-xs">ICPhS, 2011</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"On the robustness of Speech Perception","attachmentId":120348555,"attachmentType":"pdf","work_url":"https://www.academia.edu/126476844/On_the_robustness_of_Speech_Perception","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/126476844/On_the_robustness_of_Speech_Perception"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" 
data-collection-position="5" data-entity-id="81007269" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/81007269/On_the_Use_of_the_Distortion_Sensitivity_Approach_in_Examining_the_Role_of_Linguistic_Abilities_in_Speech_Understanding_in_Noise">On the Use of the Distortion-Sensitivity Approach in Examining the Role of Linguistic Abilities in Speech Understanding in Noise</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="21357268" href="https://independent.academia.edu/EHuysmans">E Huysmans</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Journal of Speech, Language, and Hearing Research, 2011</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"On the Use of the Distortion-Sensitivity Approach in Examining the Role of Linguistic Abilities in Speech Understanding in Noise","attachmentId":87199811,"attachmentType":"pdf","work_url":"https://www.academia.edu/81007269/On_the_Use_of_the_Distortion_Sensitivity_Approach_in_Examining_the_Role_of_Linguistic_Abilities_in_Speech_Understanding_in_Noise","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/81007269/On_the_Use_of_the_Distortion_Sensitivity_Approach_in_Examining_the_Role_of_Linguistic_Abilities_in_Speech_Understanding_in_Noise"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div 
class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="6" data-entity-id="103142483" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/103142483/What_we_do_not_know_about_the_mechanisms_underlying_adaptive_speech_perception_A_computational_framework_and_review">What we do (not) know about the mechanisms underlying adaptive speech perception: A computational framework and review</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="97108524" href="https://rochester.academia.edu/CKurumada">Chigusa Kurumada</a></div><p class="ds-related-work--metadata ds2-5-body-xs">2023</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"What we do (not) know about the mechanisms underlying adaptive speech perception: A computational framework and review","attachmentId":103228217,"attachmentType":"pdf","work_url":"https://www.academia.edu/103142483/What_we_do_not_know_about_the_mechanisms_underlying_adaptive_speech_perception_A_computational_framework_and_review","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/103142483/What_we_do_not_know_about_the_mechanisms_underlying_adaptive_speech_perception_A_computational_framework_and_review"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" 
data-collection-position="7" data-entity-id="62367212" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/62367212/Mechanisms_of_noise_robust_representation_of_speech_in_primary_auditory_cortex">Mechanisms of noise robust representation of speech in primary auditory cortex</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="31957814" href="https://umd.academia.edu/JFritz">Jonathan Fritz</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Proceedings of the National Academy of Sciences, 2014</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Mechanisms of noise robust representation of speech in primary auditory cortex","attachmentId":75157460,"attachmentType":"pdf","work_url":"https://www.academia.edu/62367212/Mechanisms_of_noise_robust_representation_of_speech_in_primary_auditory_cortex","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/62367212/Mechanisms_of_noise_robust_representation_of_speech_in_primary_auditory_cortex"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="8" data-entity-id="123251812" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" 
href="https://www.academia.edu/123251812/Adaptive_Plasticity_in_Perceiving_Speech_Sounds">Adaptive Plasticity in Perceiving Speech Sounds</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="12402623" href="https://independent.academia.edu/JeanVroomen">Jean Vroomen</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Speech Perception, 2022</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Adaptive Plasticity in Perceiving Speech Sounds","attachmentId":117727634,"attachmentType":"pdf","work_url":"https://www.academia.edu/123251812/Adaptive_Plasticity_in_Perceiving_Speech_Sounds","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/123251812/Adaptive_Plasticity_in_Perceiving_Speech_Sounds"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="9" data-entity-id="121997398" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/121997398/Editorial_Perceptual_flexibility_for_speech_What_are_the_pros_and_cons">Editorial Perceptual flexibility for speech: What are the pros and cons</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="55020781" href="https://pdx.academia.edu/CarolynQuam">Carolyn 
Quam</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Brain and Language, 2022</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Editorial Perceptual flexibility for speech: What are the pros and cons","attachmentId":116748955,"attachmentType":"pdf","work_url":"https://www.academia.edu/121997398/Editorial_Perceptual_flexibility_for_speech_What_are_the_pros_and_cons","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/121997398/Editorial_Perceptual_flexibility_for_speech_What_are_the_pros_and_cons"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="10" data-entity-id="68211736" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/68211736/Basic_Issues_in_Speech_Perception">Basic Issues in Speech Perception</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="59145902" href="https://independent.academia.edu/rafikanurhidayah2">rafika nurhidayah</a></div><p class="ds-related-work--metadata ds2-5-body-xs">2019</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Basic Issues in Speech 
Perception","attachmentId":78769511,"attachmentType":"pdf","work_url":"https://www.academia.edu/68211736/Basic_Issues_in_Speech_Perception","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/68211736/Basic_Issues_in_Speech_Perception"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="11" data-entity-id="121997452" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/121997452/Perceptual_flexibility_for_speech_What_are_the_pros_and_cons">Perceptual flexibility for speech: What are the pros and cons</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="55020781" href="https://pdx.academia.edu/CarolynQuam">Carolyn Quam</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Brain and Language, 2022</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Perceptual flexibility for speech: What are the pros and cons","attachmentId":116748996,"attachmentType":"pdf","work_url":"https://www.academia.edu/121997452/Perceptual_flexibility_for_speech_What_are_the_pros_and_cons","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link 
ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/121997452/Perceptual_flexibility_for_speech_What_are_the_pros_and_cons"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="12" data-entity-id="126042731" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/126042731/Topics_in_speech_perception">Topics in speech perception</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="33351420" href="https://indiana.academia.edu/DianeKewleyport">Diane Kewley-port</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Proceedings of International Conferences of Experimental Linguistics, 2019</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Topics in speech perception","attachmentId":119980407,"attachmentType":"pdf","work_url":"https://www.academia.edu/126042731/Topics_in_speech_perception","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/126042731/Topics_in_speech_perception"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="13" 
data-entity-id="115613718" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/115613718/Talker_adaptation_in_speech_perception_Adjusting_the_signal_or_the_representations">Talker adaptation in speech perception: Adjusting the signal or the representations?</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="28766005" href="https://colorado.academia.edu/RebeccaScarborough">Rebecca Scarborough</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Cognition, 2008</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Talker adaptation in speech perception: Adjusting the signal or the representations?","attachmentId":111971982,"attachmentType":"pdf","work_url":"https://www.academia.edu/115613718/Talker_adaptation_in_speech_perception_Adjusting_the_signal_or_the_representations","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/115613718/Talker_adaptation_in_speech_perception_Adjusting_the_signal_or_the_representations"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="14" data-entity-id="13811855" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" 
href="https://www.academia.edu/13811855/Perceptual_learning_of_noise_vocoded_words_Effects_of_feedback_and_lexicality">Perceptual learning of noise vocoded words: Effects of feedback and lexicality</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="32911360" href="https://independent.academia.edu/RCarlyon">Robert Carlyon</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Journal of Experimental Psychology: Human Perception and Performance, 2008</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Perceptual learning of noise vocoded words: Effects of feedback and lexicality","attachmentId":44923686,"attachmentType":"pdf","work_url":"https://www.academia.edu/13811855/Perceptual_learning_of_noise_vocoded_words_Effects_of_feedback_and_lexicality","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/13811855/Perceptual_learning_of_noise_vocoded_words_Effects_of_feedback_and_lexicality"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="15" data-entity-id="92163639" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/92163639/The_Effect_of_Modified_Speech_on_Listening_to_Authentic_Speech">The Effect of Modified Speech on Listening to Authentic Speech</a><div 
class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="171678488" href="https://independent.academia.edu/Rkhodabakhsh">Reza khodabakhsh</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Journal of Language Teaching and Research, 2010</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"The Effect of Modified Speech on Listening to Authentic Speech","attachmentId":95246346,"attachmentType":"pdf","work_url":"https://www.academia.edu/92163639/The_Effect_of_Modified_Speech_on_Listening_to_Authentic_Speech","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/92163639/The_Effect_of_Modified_Speech_on_Listening_to_Authentic_Speech"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="16" data-entity-id="32010802" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/32010802/Speech_perception_as_an_active_cognitive_process">Speech perception as an active cognitive process</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="42860068" href="https://chicago.academia.edu/ShannonHeald">Shannon Heald</a><span>, </span><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="1571" 
href="https://chicago.academia.edu/HowardNusbaum">Howard Nusbaum</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Frontiers in Systems Neuroscience, 2014</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Speech perception as an active cognitive process","attachmentId":52277550,"attachmentType":"pdf","work_url":"https://www.academia.edu/32010802/Speech_perception_as_an_active_cognitive_process","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/32010802/Speech_perception_as_an_active_cognitive_process"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="17" data-entity-id="14340909" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/14340909/Multiple_routes_to_the_perceptual_learning_of_speech">Multiple routes to the perceptual learning of speech</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="33290041" href="https://indiana.academia.edu/TessaBent">Tessa Bent</a></div><p class="ds-related-work--metadata ds2-5-body-xs">The Journal of the Acoustical Society of America, 2008</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" 
data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Multiple routes to the perceptual learning of speech","attachmentId":44306457,"attachmentType":"pdf","work_url":"https://www.academia.edu/14340909/Multiple_routes_to_the_perceptual_learning_of_speech","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/14340909/Multiple_routes_to_the_perceptual_learning_of_speech"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="18" data-entity-id="27912866" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/27912866/Spectral_restoration_of_speech_Intelligibility_is_increased_by_inserting_noise_in_spectral_gaps">Spectral restoration of speech: Intelligibility is increased by inserting noise in spectral gaps</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="2981729" href="https://uwm.academia.edu/RichardMwarren">Richard M. 
Warren</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Perception & Psychophysics, 1997</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Spectral restoration of speech: Intelligibility is increased by inserting noise in spectral gaps","attachmentId":48204029,"attachmentType":"pdf","work_url":"https://www.academia.edu/27912866/Spectral_restoration_of_speech_Intelligibility_is_increased_by_inserting_noise_in_spectral_gaps","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/27912866/Spectral_restoration_of_speech_Intelligibility_is_increased_by_inserting_noise_in_spectral_gaps"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="19" data-entity-id="13775293" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/13775293/Auditory_Adaptation_in_Voice_Perception">Auditory Adaptation in Voice Perception</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="32884292" href="https://independent.academia.edu/NadineKloth">Nadine Kloth</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Current Biology, 2008</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" 
data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Auditory Adaptation in Voice Perception","attachmentId":44968799,"attachmentType":"pdf","work_url":"https://www.academia.edu/13775293/Auditory_Adaptation_in_Voice_Perception","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/13775293/Auditory_Adaptation_in_Voice_Perception"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="20" data-entity-id="79442005" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/79442005/The_Dynamic_Nature_of_Speech_Perception">The Dynamic Nature of Speech Perception</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="34729942" href="https://independent.academia.edu/DennisNorris">Dennis Norris</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Language and Speech, 2006</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"The Dynamic Nature of Speech Perception","attachmentId":86155623,"attachmentType":"pdf","work_url":"https://www.academia.edu/79442005/The_Dynamic_Nature_of_Speech_Perception","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free 
PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/79442005/The_Dynamic_Nature_of_Speech_Perception"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="21" data-entity-id="22335311" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/22335311/Characterization_of_speech_understanding_in_various_types_of_noise">Characterization of speech understanding in various types of noise</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="35077723" href="https://independent.academia.edu/LenaWong2">Lena Wong</a></div><p class="ds-related-work--metadata ds2-5-body-xs">The Journal of the Acoustical Society of America, 2012</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Characterization of speech understanding in various types of noise","attachmentId":42969445,"attachmentType":"pdf","work_url":"https://www.academia.edu/22335311/Characterization_of_speech_understanding_in_various_types_of_noise","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/22335311/Characterization_of_speech_understanding_in_various_types_of_noise"><span class="ds2-5-text-link__content">View PDF</span><span 
class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="22" data-entity-id="887308" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/887308/Perceptual_adaptation_and_intelligibility_of_multiple_talkers_for_two_types_of_degraded_speech">Perceptual adaptation and intelligibility of multiple talkers for two types of degraded speech</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="709005" href="https://nyu.academia.edu/AdamBuchwald">Adam Buchwald</a></div><p class="ds-related-work--metadata ds2-5-body-xs">The Journal of the Acoustical Society …, 2009</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Perceptual adaptation and intelligibility of multiple talkers for two types of degraded speech","attachmentId":51184360,"attachmentType":"pdf","work_url":"https://www.academia.edu/887308/Perceptual_adaptation_and_intelligibility_of_multiple_talkers_for_two_types_of_degraded_speech","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/887308/Perceptual_adaptation_and_intelligibility_of_multiple_talkers_for_two_types_of_degraded_speech"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container 
js-related-work-sidebar-card" data-collection-position="23" data-entity-id="114570434" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/114570434/Perceptual_learning_for_speech_Is_there_a_return_to_normal">Perceptual learning for speech: Is there a return to normal?</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="218403923" href="https://independent.academia.edu/TanyaKraljic">Tanya Kraljic</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Cognitive Psychology, 2005</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Perceptual learning for speech: Is there a return to normal?","attachmentId":111234282,"attachmentType":"pdf","work_url":"https://www.academia.edu/114570434/Perceptual_learning_for_speech_Is_there_a_return_to_normal","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/114570434/Perceptual_learning_for_speech_Is_there_a_return_to_normal"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="24" data-entity-id="86009455" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" 
href="https://www.academia.edu/86009455/Interactions_Between_Unsupervised_Learning_and_the_Degree_of_Spectral_Mismatch_on_Short_Term_Perceptual_Adaptation_to_Spectrally_Shifted_Speech">Interactions Between Unsupervised Learning and the Degree of Spectral Mismatch on Short-Term Perceptual Adaptation to Spectrally Shifted Speech</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="151037491" href="https://ucla.academia.edu/QianJieFu">Qian-Jie Fu</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Ear &amp; Hearing, 2009</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Interactions Between Unsupervised Learning and the Degree of Spectral Mismatch on Short-Term Perceptual Adaptation to Spectrally Shifted Speech","attachmentId":90555159,"attachmentType":"pdf","work_url":"https://www.academia.edu/86009455/Interactions_Between_Unsupervised_Learning_and_the_Degree_of_Spectral_Mismatch_on_Short_Term_Perceptual_Adaptation_to_Spectrally_Shifted_Speech","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/86009455/Interactions_Between_Unsupervised_Learning_and_the_Degree_of_Spectral_Mismatch_on_Short_Term_Perceptual_Adaptation_to_Spectrally_Shifted_Speech"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="25" data-entity-id="114004586" data-sort-order="default"><a 
class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/114004586/Perceptual_learning_and_speech_perception_A_new_hypothesis">Perceptual learning and speech perception: A new hypothesis</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="36687357" href="https://haifa.academia.edu/KarenBanai">Karen Banai</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Proceedings of the International Symposium on Auditory and Audiological Research, 2019</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Perceptual learning and speech perception: A new hypothesis","attachmentId":110817543,"attachmentType":"pdf","work_url":"https://www.academia.edu/114004586/Perceptual_learning_and_speech_perception_A_new_hypothesis","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/114004586/Perceptual_learning_and_speech_perception_A_new_hypothesis"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="26" data-entity-id="3273616" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/3273616/Speech_perception">Speech perception</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm 
ds2-5-body-link" data-author-id="3113017" href="https://wustl.academia.edu/CaseyOCallaghan">Casey O'Callaghan</a></div><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Speech perception","attachmentId":31117376,"attachmentType":"pdf","work_url":"https://www.academia.edu/3273616/Speech_perception","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/3273616/Speech_perception"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div><div class="ds-related-work--container js-related-work-sidebar-card" data-collection-position="27" data-entity-id="6540032" data-sort-order="default"><a class="ds-related-work--title js-related-work-grid-card-title ds2-5-body-md ds2-5-body-link" href="https://www.academia.edu/6540032/Investigating_the_perception_of_noise_vocoded_speech_an_individual_differences_approach">Investigating the perception of noise-vocoded speech - an individual differences approach</a><div class="ds-related-work--metadata"><a class="js-related-work-grid-card-author ds2-5-body-sm ds2-5-body-link" data-author-id="10477029" href="https://royalholloway.academia.edu/CarolynMcGettigan">Carolyn McGettigan</a></div><p class="ds-related-work--metadata ds2-5-body-xs">Journal of The Acoustical Society of America, 2008</p><div class="ds-related-work--ctas"><button class="ds2-5-text-link ds2-5-text-link--inline js-swp-download-button" data-signup-modal="{"location":"wsj-grid-card-download-pdf-modal","work_title":"Investigating the perception of noise-vocoded speech - 
an individual differences approach","attachmentId":48817123,"attachmentType":"pdf","work_url":"https://www.academia.edu/6540032/Investigating_the_perception_of_noise_vocoded_speech_an_individual_differences_approach","alternativeTracking":true}"><span class="material-symbols-outlined" style="font-size: 18px" translate="no">download</span><span class="ds2-5-text-link__content">Download free PDF</span></button><a class="ds2-5-text-link ds2-5-text-link--inline js-related-work-grid-card-view-pdf" href="https://www.academia.edu/6540032/Investigating_the_perception_of_noise_vocoded_speech_an_individual_differences_approach"><span class="ds2-5-text-link__content">View PDF</span><span class="material-symbols-outlined" style="font-size: 18px" translate="no">chevron_right</span></a></div></div></div><div class="ds-related-content--container"><h2 class="ds-related-content--heading">Related topics</h2><div class="ds-research-interests--pills-container"><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="254" rel="nofollow" href="https://www.academia.edu/Documents/in/Emotion">Emotion</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="7523" rel="nofollow" href="https://www.academia.edu/Documents/in/Control">Control</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="7736" rel="nofollow" href="https://www.academia.edu/Documents/in/Attention">Attention</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="32362" rel="nofollow" href="https://www.academia.edu/Documents/in/Executive_Control">Executive Control</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="36299" rel="nofollow" href="https://www.academia.edu/Documents/in/Distributed_Systems">Distributed Systems</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="80547" rel="nofollow" 
href="https://www.academia.edu/Documents/in/Attentional_Blink">Attentional Blink</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="146921" rel="nofollow" href="https://www.academia.edu/Documents/in/Process_Algebra">Process Algebra</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="174835" rel="nofollow" href="https://www.academia.edu/Documents/in/Threat">Threat</a><a class="js-related-research-interest ds-research-interests--pill" data-entity-id="174857" rel="nofollow" href="https://www.academia.edu/Documents/in/Startle">Startle</a></div></div></div></div></div><div class="footer--content"><ul class="footer--main-links hide-on-mobile"><li><a href="https://www.academia.edu/about">About</a></li><li><a href="https://www.academia.edu/press">Press</a></li><li><a href="https://www.academia.edu/documents">Papers</a></li><li><a href="https://www.academia.edu/topics">Topics</a></li><li><a href="https://www.academia.edu/hiring"><svg style="width: 13px; height: 13px; position: relative; bottom: -1px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="briefcase" class="svg-inline--fa fa-briefcase fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M320 336c0 8.84-7.16 16-16 16h-96c-8.84 0-16-7.16-16-16v-48H0v144c0 25.6 22.4 48 48 48h416c25.6 0 48-22.4 48-48V288H320v48zm144-208h-80V80c0-25.6-22.4-48-48-48H176c-25.6 0-48 22.4-48 48v48H48c-25.6 0-48 22.4-48 48v80h512v-80c0-25.6-22.4-48-48-48zm-144 0H192V96h128v32z"></path></svg> <strong>We're Hiring!</strong></a></li><li><a href="https://support.academia.edu/hc/en-us"><svg style="width: 12px; height: 12px; position: relative; bottom: -1px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="question-circle" class="svg-inline--fa fa-question-circle fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M504 256c0 
136.997-111.043 248-248 248S8 392.997 8 256C8 119.083 119.043 8 256 8s248 111.083 248 248zM262.655 90c-54.497 0-89.255 22.957-116.549 63.758-3.536 5.286-2.353 12.415 2.715 16.258l34.699 26.31c5.205 3.947 12.621 3.008 16.665-2.122 17.864-22.658 30.113-35.797 57.303-35.797 20.429 0 45.698 13.148 45.698 32.958 0 14.976-12.363 22.667-32.534 33.976C247.128 238.528 216 254.941 216 296v4c0 6.627 5.373 12 12 12h56c6.627 0 12-5.373 12-12v-1.333c0-28.462 83.186-29.647 83.186-106.667 0-58.002-60.165-102-116.531-102zM256 338c-25.365 0-46 20.635-46 46 0 25.364 20.635 46 46 46s46-20.636 46-46c0-25.365-20.635-46-46-46z"></path></svg> <strong>Help Center</strong></a></li></ul><ul class="footer--research-interests"><li>Find new research papers in:</li><li><a href="https://www.academia.edu/Documents/in/Physics">Physics</a></li><li><a href="https://www.academia.edu/Documents/in/Chemistry">Chemistry</a></li><li><a href="https://www.academia.edu/Documents/in/Biology">Biology</a></li><li><a href="https://www.academia.edu/Documents/in/Health_Sciences">Health Sciences</a></li><li><a href="https://www.academia.edu/Documents/in/Ecology">Ecology</a></li><li><a href="https://www.academia.edu/Documents/in/Earth_Sciences">Earth Sciences</a></li><li><a href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a></li><li><a href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a></li><li><a href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></li></ul><ul class="footer--legal-links hide-on-mobile"><li><a href="https://www.academia.edu/terms">Terms</a></li><li><a href="https://www.academia.edu/privacy">Privacy</a></li><li><a href="https://www.academia.edu/copyright">Copyright</a></li><li>Academia ©2025</li></ul></div> </body> </html>