Eye tracking
Measuring the point of gaze or motion of an eye relative to the head

This article is about the study of eye movement. For the tendency to visually track potential prey, see eye-stalking.

[Figure: Eye tracking device]
[Video: Scientists track eye movements in glaucoma patients to check vision impairment while driving.]

Eye tracking is the process of measuring either the point of gaze (where one is looking) or the motion of an eye relative to the head. An eye tracker is a device for measuring eye positions and eye movement. Eye trackers are used in research on the visual system, in psychology, in psycholinguistics, in marketing, as an input device for human-computer interaction, and in product design. In addition, eye trackers are increasingly used for assistive and rehabilitative applications such as controlling wheelchairs, robotic arms, and prostheses. Eye tracking has also been examined as a tool for the early detection of autism spectrum disorder. There are several methods for measuring eye movement; the most popular variant uses video images from which the eye position is extracted. Other methods use search coils or are based on the electrooculogram.
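To illustrate the video-based method in its simplest form: a camera image of the eye is thresholded so that the pupil, normally the darkest region, can be isolated, and the pupil's centroid serves as a raw eye-position signal. The sketch below shows that idea in Python with OpenCV. It is a minimal illustration under assumed conditions (a pre-cropped grayscale eye image `eye_gray` and a hand-picked darkness threshold), not the method of any particular tracker; practical systems also use infrared corneal reflections and a per-user calibration step to map pupil position to an on-screen gaze point.

    # Minimal sketch: locate the pupil in a cropped grayscale eye image.
    # Assumptions (illustrative only): the pupil is the largest very-dark
    # blob, and a threshold of 40 suits the lighting conditions.
    import cv2
    import numpy as np

    def pupil_center(eye_gray: np.ndarray, dark_thresh: int = 40):
        """Return the (x, y) centroid of the pupil candidate, or None."""
        # Keep only very dark pixels; the pupil is darker than iris/sclera.
        _, mask = cv2.threshold(eye_gray, dark_thresh, 255,
                                cv2.THRESH_BINARY_INV)
        mask = cv2.medianBlur(mask, 5)  # suppress specular-highlight noise
        contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                       cv2.CHAIN_APPROX_SIMPLE)
        if not contours:
            return None
        blob = max(contours, key=cv2.contourArea)  # largest dark blob
        m = cv2.moments(blob)
        if m["m00"] == 0:
            return None
        return (m["m10"] / m["m00"], m["m01"] / m["m00"])

Tracking this centroid frame by frame yields the eye-movement trace; turning it into gaze coordinates additionally requires the calibration noted above.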
For how long? When do they regress to already seen words? </p> <figure class="mw-default-size mw-halign-left" typeof="mw:File/Thumb"><a href="/wiki/File:Reading_Fixations_Saccades.jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/e/ef/Reading_Fixations_Saccades.jpg/330px-Reading_Fixations_Saccades.jpg" decoding="async" width="330" height="248" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/e/ef/Reading_Fixations_Saccades.jpg/495px-Reading_Fixations_Saccades.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/e/ef/Reading_Fixations_Saccades.jpg/660px-Reading_Fixations_Saccades.jpg 2x" data-file-width="5760" data-file-height="4320" /></a><figcaption>An example of <a href="/wiki/Fixation_(visual)" title="Fixation (visual)">fixations</a> and <a href="/wiki/Saccade" title="Saccade">saccades</a> over text. This is the typical pattern of eye movement during reading. The eyes never move smoothly over still text.</figcaption></figure> <p>Edmund Huey<sup id="cite_ref-2" class="reference"><a href="#cite_note-2"><span class="cite-bracket">[</span>2<span class="cite-bracket">]</span></a></sup> built an early eye tracker, using a sort of contact lens with a hole for the <a href="/wiki/Pupil" title="Pupil">pupil</a>. The lens was connected to an aluminum pointer that moved in response to the movement of the eye. Huey studied and quantified regressions (only a small proportion of saccades are regressions), and he showed that some words in a sentence are not fixated. </p><p>The first non-intrusive eye-trackers were built by Guy Thomas Buswell in Chicago, using beams of light that were reflected on the eye, then recording on film. Buswell made systematic studies into reading<sup id="cite_ref-3" class="reference"><a href="#cite_note-3"><span class="cite-bracket">[</span>3<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-4" class="reference"><a href="#cite_note-4"><span class="cite-bracket">[</span>4<span class="cite-bracket">]</span></a></sup> and picture viewing.<sup id="cite_ref-5" class="reference"><a href="#cite_note-5"><span class="cite-bracket">[</span>5<span class="cite-bracket">]</span></a></sup> </p><p>In the 1950s, <a href="/wiki/Alfred_L._Yarbus" title="Alfred L. Yarbus">Alfred L. Yarbus</a><sup id="cite_ref-6" class="reference"><a href="#cite_note-6"><span class="cite-bracket">[</span>6<span class="cite-bracket">]</span></a></sup> performed eye tracking research, and his 1967 book is often quoted. He showed that the task given to a subject has a very large influence on the subject's eye movement. He also wrote about the relation between fixations and interest: </p> <style data-mw-deduplicate="TemplateStyles:r1244412712">.mw-parser-output .templatequote{overflow:hidden;margin:1em 0;padding:0 32px}.mw-parser-output .templatequotecite{line-height:1.5em;text-align:left;margin-top:0}@media(min-width:500px){.mw-parser-output .templatequotecite{padding-left:1.6em}}</style><blockquote class="templatequote"><p>All the records ... 
show conclusively that the character of the eye movement is either completely independent of or only very slightly dependent on the material of the picture and how it was made, provided that it is flat or nearly flat."<sup id="cite_ref-Yarbus_1967_190_7-0" class="reference"><a href="#cite_note-Yarbus_1967_190-7"><span class="cite-bracket">[</span>7<span class="cite-bracket">]</span></a></sup></p></blockquote> <p>The cyclical pattern in the examination of pictures "is dependent on not only what is shown on the picture, but also the problem facing the observer and the information that he hopes to gain from the picture."<sup id="cite_ref-8" class="reference"><a href="#cite_note-8"><span class="cite-bracket">[</span>8<span class="cite-bracket">]</span></a></sup> </p> <figure class="mw-default-size mw-halign-right" typeof="mw:File/Thumb"><a href="/wiki/File:Yarbus_The_Visitor.jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/d/d2/Yarbus_The_Visitor.jpg/330px-Yarbus_The_Visitor.jpg" decoding="async" width="330" height="282" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/d/d2/Yarbus_The_Visitor.jpg/495px-Yarbus_The_Visitor.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/d/d2/Yarbus_The_Visitor.jpg/660px-Yarbus_The_Visitor.jpg 2x" data-file-width="808" data-file-height="690" /></a><figcaption>This study by <a href="#CITEREFYarbus1967">Yarbus (1967)</a> is often referred to as evidence on how the task given to a person influences his or her eye movement.</figcaption></figure> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1244412712"><blockquote class="templatequote"><p>Records of eye movements show that the observer's attention is usually held only by certain elements of the picture.... Eye movement reflects the human thought processes; so the observer's thought may be followed to some extent from records of eye movement (the thought accompanying the examination of the particular object). It is easy to determine from these records which elements attract the observer's eye (and, consequently, his thought), in what order, and how often.<sup id="cite_ref-Yarbus_1967_190_7-1" class="reference"><a href="#cite_note-Yarbus_1967_190-7"><span class="cite-bracket">[</span>7<span class="cite-bracket">]</span></a></sup></p></blockquote> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1244412712"><blockquote class="templatequote"><p>The observer's attention is frequently drawn to elements which do not give important information but which, in his opinion, may do so. Often an observer will focus his attention on elements that are unusual in the particular circumstances, unfamiliar, incomprehensible, and so on.<sup id="cite_ref-9" class="reference"><a href="#cite_note-9"><span class="cite-bracket">[</span>9<span class="cite-bracket">]</span></a></sup></p></blockquote> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1244412712"><blockquote class="templatequote"><p>... when changing its points of fixation, the observer's eye repeatedly returns to the same elements of the picture. 
Additional time spent on perception is not used to examine the secondary elements, but to reexamine the most important elements.<sup id="cite_ref-10" class="reference"><a href="#cite_note-10"><span class="cite-bracket">[</span>10<span class="cite-bracket">]</span></a></sup></p></blockquote> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:Eye_tracking_thru_glass.JPG" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/d/de/Eye_tracking_thru_glass.JPG/220px-Eye_tracking_thru_glass.JPG" decoding="async" width="220" height="165" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/d/de/Eye_tracking_thru_glass.JPG/330px-Eye_tracking_thru_glass.JPG 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/d/de/Eye_tracking_thru_glass.JPG/440px-Eye_tracking_thru_glass.JPG 2x" data-file-width="1600" data-file-height="1200" /></a><figcaption>This study by Hunziker (1970)<sup id="cite_ref-forsch1e.htm_11-0" class="reference"><a href="#cite_note-forsch1e.htm-11"><span class="cite-bracket">[</span>11<span class="cite-bracket">]</span></a></sup> on <i>eye tracking in problem solving</i> used simple 8 mm film to track eye movement by filming the subject through a glass plate on which the visual problem was displayed.<sup id="cite_ref-12" class="reference"><a href="#cite_note-12"><span class="cite-bracket">[</span>12<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-forsch1e.htm_11-1" class="reference"><a href="#cite_note-forsch1e.htm-11"><span class="cite-bracket">[</span>11<span class="cite-bracket">]</span></a></sup></figcaption></figure> <p>In the 1970s, eye-tracking research expanded rapidly, particularly in reading research. A good overview of the research in this period is given by <a href="/wiki/Dr._Keith_Rayner" class="mw-redirect" title="Dr. Keith Rayner">Rayner</a>.<sup id="cite_ref-13" class="reference"><a href="#cite_note-13"><span class="cite-bracket">[</span>13<span class="cite-bracket">]</span></a></sup> </p><p>In 1980, Just and Carpenter<sup id="cite_ref-14" class="reference"><a href="#cite_note-14"><span class="cite-bracket">[</span>14<span class="cite-bracket">]</span></a></sup> formulated the influential <i>Strong eye-mind hypothesis</i>, that "there is no appreciable lag between what is fixated and what is processed". If this hypothesis is correct, then when a subject looks at a word or object, he or she also thinks about it (processes it cognitively), and for exactly as long as the recorded fixation. The hypothesis is often taken for granted by researchers using eye-tracking. However, <a href="/wiki/Gaze-contingency_paradigm" title="Gaze-contingency paradigm">gaze-contingent techniques</a> offer an option for disentangling overt and covert attention, differentiating what is fixated from what is processed. </p><p>During the 1980s, the eye-mind hypothesis was often questioned in light of covert attention,<sup id="cite_ref-15" class="reference"><a href="#cite_note-15"><span class="cite-bracket">[</span>15<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-16" class="reference"><a href="#cite_note-16"><span class="cite-bracket">[</span>16<span class="cite-bracket">]</span></a></sup> the attention to something that one is not looking at, which people often do.
If covert attention is common during eye-tracking recordings, the resulting scan-path and fixation patterns would often show not where attention has been, but only where the eye has been looking, failing to indicate cognitive processing. </p><p>The 1980s also saw the birth of using eye-tracking to answer questions related to human-computer interaction. Specifically, researchers investigated how users search for commands in computer menus.<sup id="cite_ref-10.1.1.100.445_17-0" class="reference"><a href="#cite_note-10.1.1.100.445-17"><span class="cite-bracket">[</span>17<span class="cite-bracket">]</span></a></sup> Additionally, computers allowed researchers to use eye-tracking results in real time, primarily to help disabled users.<sup id="cite_ref-10.1.1.100.445_17-1" class="reference"><a href="#cite_note-10.1.1.100.445-17"><span class="cite-bracket">[</span>17<span class="cite-bracket">]</span></a></sup> </p><p>More recently, there has been growth in using eye tracking to study how users interact with different computer interfaces. Specific questions researchers ask are related to how easy different interfaces are for users.<sup id="cite_ref-10.1.1.100.445_17-2" class="reference"><a href="#cite_note-10.1.1.100.445-17"><span class="cite-bracket">[</span>17<span class="cite-bracket">]</span></a></sup> The results of the eye tracking research can lead to changes in design of the interface. Another recent area of research focuses on Web development. This can include how users react to drop-down menus or where they focus their attention on a website so the developer knows where to place an advertisement.<sup id="cite_ref-18" class="reference"><a href="#cite_note-18"><span class="cite-bracket">[</span>18<span class="cite-bracket">]</span></a></sup> </p><p>According to Hoffman,<sup id="cite_ref-19" class="reference"><a href="#cite_note-19"><span class="cite-bracket">[</span>19<span class="cite-bracket">]</span></a></sup> current consensus is that visual attention is always slightly (100 to 250 ms) ahead of the eye. But as soon as attention moves to a new position, the eyes will want to follow.<sup id="cite_ref-20" class="reference"><a href="#cite_note-20"><span class="cite-bracket">[</span>20<span class="cite-bracket">]</span></a></sup> </p><p>Specific cognitive processes still cannot be inferred directly from a fixation on a particular object in a scene.<sup id="cite_ref-21" class="reference"><a href="#cite_note-21"><span class="cite-bracket">[</span>21<span class="cite-bracket">]</span></a></sup> For instance, a fixation on a face in a picture may indicate recognition, liking, dislike, puzzlement etc. Therefore, eye tracking is often coupled with other methodologies, such as <a href="/wiki/Protocol_analysis" title="Protocol analysis">introspective verbal protocols</a>. 
</p><p>Thanks to advances in portable electronic devices, portable head-mounted eye trackers can now achieve excellent performance and are being increasingly used in research and market applications targeting daily life settings.<sup id="cite_ref-22" class="reference"><a href="#cite_note-22"><span class="cite-bracket">[</span>22<span class="cite-bracket">]</span></a></sup> These same advances have led to increases in the study of small eye movements that occur during fixation, both in the lab and in applied settings.<sup id="cite_ref-bop.unibe.ch_23-0" class="reference"><a href="#cite_note-bop.unibe.ch-23"><span class="cite-bracket">[</span>23<span class="cite-bracket">]</span></a></sup> </p> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:Typical_cnn.png" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/6/63/Typical_cnn.png/220px-Typical_cnn.png" decoding="async" width="220" height="68" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/6/63/Typical_cnn.png/330px-Typical_cnn.png 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/6/63/Typical_cnn.png/440px-Typical_cnn.png 2x" data-file-width="1040" data-file-height="320" /></a><figcaption>The use of convolutional neural networks in eye-tracking allows for new information to be identified by artificial intelligence.</figcaption></figure> <p>In the 21st century, the use of <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> (AI) and <a href="/wiki/Artificial_neural_network" class="mw-redirect" title="Artificial neural network">artificial neural networks</a> has become a viable way to complete eye-tracking tasks and analysis. In particular, the <a href="/wiki/Convolutional_neural_network" title="Convolutional neural network">convolutional neural network</a> lends itself to eye-tracking, as it is designed for image-centric tasks. With AI, eye-tracking tasks and studies can yield additional information that may not have been detected by human observers. The practice of <a href="/wiki/Deep_learning" title="Deep learning">deep learning</a> also allows a given neural network to improve at a given task when given enough sample data.
This requires a relatively large supply of training data, however.<sup id="cite_ref-:0_24-0" class="reference"><a href="#cite_note-:0-24"><span class="cite-bracket">[</span>24<span class="cite-bracket">]</span></a></sup> </p><p>The potential use cases for AI in eye-tracking cover a wide range of topics from medical applications<sup id="cite_ref-25" class="reference"><a href="#cite_note-25"><span class="cite-bracket">[</span>25<span class="cite-bracket">]</span></a></sup> to driver safety<sup id="cite_ref-:0_24-1" class="reference"><a href="#cite_note-:0-24"><span class="cite-bracket">[</span>24<span class="cite-bracket">]</span></a></sup> to game theory<sup id="cite_ref-:1_26-0" class="reference"><a href="#cite_note-:1-26"><span class="cite-bracket">[</span>26<span class="cite-bracket">]</span></a></sup> and even education and training applications.<sup id="cite_ref-27" class="reference"><a href="#cite_note-27"><span class="cite-bracket">[</span>27<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-28" class="reference"><a href="#cite_note-28"><span class="cite-bracket">[</span>28<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-29" class="reference"><a href="#cite_note-29"><span class="cite-bracket">[</span>29<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Tracker_types">Tracker types</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=2" title="Edit section: Tracker types"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Eye-trackers measure rotations of the eye in one of several ways, but principally they fall into one of three categories: </p> <ol><li>measurement of the movement of an object (normally, a special contact lens) attached to the eye</li> <li>optical tracking without direct contact to the eye</li> <li>measurement of electric potentials using electrodes placed around the eyes.</li></ol> <div class="mw-heading mw-heading3"><h3 id="Eye-attached_tracking">Eye-attached tracking</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=3" title="Edit section: Eye-attached tracking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The first type uses an attachment to the eye, such as a special contact lens with an embedded mirror or magnetic field sensor, and the movement of the attachment is measured with the assumption that it does not slip significantly as the eye rotates. Measurements with tight-fitting contact lenses have provided extremely sensitive recordings of eye movement, and magnetic search coils are the method of choice for researchers studying the dynamics and underlying physiology of eye movement. 
This method allows the measurement of eye movement in horizontal, vertical and torsion directions.<sup id="cite_ref-30" class="reference"><a href="#cite_note-30"><span class="cite-bracket">[</span>30<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Optical_tracking">Optical tracking</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=4" title="Edit section: Optical tracking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">See also: <a href="/wiki/Video-oculography" title="Video-oculography">Video-oculography</a></div> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:EYE-SYNC_eye-tracking_analyzer.JPG" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/e/e9/EYE-SYNC_eye-tracking_analyzer.JPG/220px-EYE-SYNC_eye-tracking_analyzer.JPG" decoding="async" width="220" height="165" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/e/e9/EYE-SYNC_eye-tracking_analyzer.JPG/330px-EYE-SYNC_eye-tracking_analyzer.JPG 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/e/e9/EYE-SYNC_eye-tracking_analyzer.JPG/440px-EYE-SYNC_eye-tracking_analyzer.JPG 2x" data-file-width="3264" data-file-height="2448" /></a><figcaption>An eye-tracking <a href="/wiki/Head-mounted_display" title="Head-mounted display">head-mounted display</a>. Each eye has an LED light source (gold-color metal) on the side of the display lens, and a camera under the display lens.</figcaption></figure> <p>The second broad category uses some non-contact, optical method for measuring eye motion. Light, typically infrared, is reflected from the eye and sensed by a video camera or some other specially designed optical sensor. The information is then analyzed to extract eye rotation from changes in reflections. Video-based eye trackers typically use the corneal reflection (the first <a href="/wiki/Purkinje_images" title="Purkinje images">Purkinje image</a>) and the center of the pupil as features to track over time. A more sensitive type of eye-tracker, the dual-Purkinje eye tracker,<sup id="cite_ref-31" class="reference"><a href="#cite_note-31"><span class="cite-bracket">[</span>31<span class="cite-bracket">]</span></a></sup> uses reflections from the front of the cornea (first Purkinje image) and the back of the lens (fourth Purkinje image) as features to track. A still more sensitive method of tracking is to image features from inside the eye, such as the retinal blood vessels, and follow these features as the eye rotates. Optical methods, particularly those based on video recording, are widely used for gaze-tracking and are favored for being non-invasive and inexpensive. </p> <div class="mw-heading mw-heading3"><h3 id="Electric_potential_measurement">Electric potential measurement</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=5" title="Edit section: Electric potential measurement"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The third category uses electric potentials measured with electrodes placed around the eyes. 
The eyes are the origin of a steady electric potential field, which can be detected even in total darkness and with the eyes closed. It can be modelled as being generated by a dipole with its positive pole at the cornea and its negative pole at the retina. The electric signal that can be derived using two pairs of contact electrodes placed on the skin around one eye is called the <a href="/wiki/EOG" class="mw-redirect" title="EOG">electrooculogram (EOG)</a>. If the eyes move from the centre position towards the periphery, the retina approaches one electrode while the cornea approaches the opposing one. This change in the orientation of the dipole, and consequently of the electric potential field, results in a change in the measured EOG signal. Inversely, by analysing these changes, eye movement can be tracked. Due to the discretisation given by the common electrode setup, two separate movement components – a horizontal and a vertical – can be identified. A third EOG component is the radial EOG channel,<sup id="cite_ref-32" class="reference"><a href="#cite_note-32"><span class="cite-bracket">[</span>32<span class="cite-bracket">]</span></a></sup> which is the average of the EOG channels referenced to some posterior scalp electrode. This radial EOG channel is sensitive to the saccadic spike potentials stemming from the extra-ocular muscles at the onset of saccades, and allows reliable detection of even miniature saccades.<sup id="cite_ref-33" class="reference"><a href="#cite_note-33"><span class="cite-bracket">[</span>33<span class="cite-bracket">]</span></a></sup> </p><p>Due to potential drifts and variable relations between the EOG signal amplitudes and the saccade sizes, it is challenging to use EOG for measuring slow eye movement and detecting gaze direction. EOG is, however, a very robust technique for measuring <a href="/wiki/Saccade" title="Saccade">saccadic eye movement</a> associated with gaze shifts and detecting <a href="/wiki/Blink" class="mw-redirect" title="Blink">blinks</a>. Contrary to video-based eye-trackers, EOG allows recording of eye movements even with eyes closed, and can thus be used in sleep research. It is a very lightweight approach that, in contrast to current video-based eye-trackers, requires low computational power, works under different lighting conditions and can be implemented as an embedded, self-contained <a href="/wiki/Wearable_technology" title="Wearable technology">wearable</a> system.<sup id="cite_ref-34" class="reference"><a href="#cite_note-34"><span class="cite-bracket">[</span>34<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-35" class="reference"><a href="#cite_note-35"><span class="cite-bracket">[</span>35<span class="cite-bracket">]</span></a></sup> It is thus the method of choice for measuring eye movement in mobile daily-life situations and <a href="/wiki/Rapid_eye_movement_sleep" title="Rapid eye movement sleep">REM</a> phases during sleep. The major disadvantage of EOG is its relatively poor gaze-direction accuracy compared to a video tracker. That is, it is difficult to determine with good accuracy exactly where a subject is looking, though the timing of eye movements can be determined.
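</p> <p>To make the use of the two EOG components concrete, the following minimal sketch (in Python) detects saccade onsets from a horizontal and a vertical EOG channel with a simple velocity threshold. The sampling rate, volts-per-degree gain and threshold are illustrative placeholders rather than recommended values; in practice the gain is subject-specific and obtained by calibration, and this is not any particular published algorithm.</p> <pre>
import numpy as np

def detect_saccade_onsets(eog_h, eog_v, fs=250.0, gain_uv_per_deg=20.0,
                          vel_thresh_deg_s=50.0):
    """Detect saccade onsets in a two-channel EOG recording.

    eog_h, eog_v     : horizontal and vertical EOG signals in microvolts,
                       each derived from one electrode pair around the eye.
    fs               : sampling rate in Hz.
    gain_uv_per_deg  : assumed linear EOG gain (microvolts per degree of
                       rotation); subject-specific in reality.
    vel_thresh_deg_s : velocity threshold separating saccades from slower
                       movements and drift.
    Returns saccade onset times in seconds.
    """
    # Convert voltages to approximate rotation angles in degrees.
    h_deg = np.asarray(eog_h, float) / gain_uv_per_deg
    v_deg = np.asarray(eog_v, float) / gain_uv_per_deg
    # Radial eye velocity (deg/s) from sample-to-sample differences.
    vel = np.hypot(np.diff(h_deg), np.diff(v_deg)) * fs
    is_saccadic = vel > vel_thresh_deg_s
    # Onsets are the rising edges of the thresholded velocity trace.
    onsets = np.flatnonzero(np.diff(is_saccadic.astype(int)) == 1) + 1
    return onsets / fs
</pre>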
<div class="mw-heading mw-heading2"><h2 id="Technologies_and_techniques">Technologies and techniques</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=6" title="Edit section: Technologies and techniques"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The most widely used current designs are video-based eye-trackers. A camera focuses on one or both eyes and records eye movement as the viewer looks at some kind of stimulus. Most modern eye-trackers use the center of the pupil and <a href="/wiki/Infrared" title="Infrared">infrared</a> / <a href="/wiki/Near-infrared" class="mw-redirect" title="Near-infrared">near-infrared</a> non-collimated light to create <a href="/wiki/Corneal_reflection" class="mw-redirect" title="Corneal reflection">corneal reflections</a> (CR). The vector between the pupil center and the corneal reflections can be used to compute the point of regard on a surface or the gaze direction. A simple calibration procedure for each individual is usually needed before using the eye tracker.<sup id="cite_ref-36" class="reference"><a href="#cite_note-36"><span class="cite-bracket">[</span>36<span class="cite-bracket">]</span></a></sup> </p><p>Two general types of infrared / near-infrared (also known as active light) eye-tracking techniques are used: bright-pupil and dark-pupil. Their difference is based on the location of the illumination source with respect to the optics. If the illumination is <a href="/wiki/Coaxial" title="Coaxial">coaxial</a> with the optical path, then the eye acts as a <a href="/wiki/Retroreflector" title="Retroreflector">retroreflector</a> as the light reflects off the <a href="/wiki/Retina" title="Retina">retina</a>, creating a bright-pupil effect similar to <a href="/wiki/Red-eye_effect" title="Red-eye effect">red eye</a>. If the illumination source is offset from the optical path, then the pupil appears dark because the retroreflection from the retina is directed away from the camera.<sup id="cite_ref-gneo_37-0" class="reference"><a href="#cite_note-gneo-37"><span class="cite-bracket">[</span>37<span class="cite-bracket">]</span></a></sup> </p><p>Bright-pupil tracking creates greater iris/pupil contrast, allowing more robust eye-tracking with all iris pigmentation, and greatly reduces interference caused by eyelashes and other obscuring features.<sup id="cite_ref-38" class="reference"><a href="#cite_note-38"><span class="cite-bracket">[</span>38<span class="cite-bracket">]</span></a></sup> It also allows tracking in lighting conditions ranging from total darkness to very bright. </p>
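<p>How the pupil-centre/corneal-reflection vector becomes a point of gaze is easiest to see in code. The minimal sketch below fits a least-squares polynomial mapping during the calibration step mentioned above, while the user fixates a grid of known targets; the quadratic form and the function names are illustrative choices, not a description of any particular commercial system.</p> <pre>
import numpy as np

def fit_gaze_calibration(pcr_vectors, screen_targets):
    """Fit a quadratic mapping from pupil-minus-corneal-reflection (PCR)
    vectors, in camera pixels, to screen coordinates via least squares.

    pcr_vectors    : (N, 2) PCR vectors recorded while the user fixates
                     N known calibration targets (e.g. a 3x3 grid).
    screen_targets : (N, 2) corresponding screen positions.
    """
    vx, vy = pcr_vectors[:, 0], pcr_vectors[:, 1]
    # Quadratic design matrix: 1, vx, vy, vx*vy, vx^2, vy^2.
    A = np.column_stack([np.ones_like(vx), vx, vy, vx * vy, vx**2, vy**2])
    coeffs, *_ = np.linalg.lstsq(A, screen_targets, rcond=None)
    return coeffs  # shape (6, 2): one column per screen axis

def gaze_point(pcr_vector, coeffs):
    """Map a single PCR vector to an estimated on-screen gaze point."""
    vx, vy = pcr_vector
    features = np.array([1.0, vx, vy, vx * vy, vx**2, vy**2])
    return features @ coeffs
</pre>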
<p>Another, less used, method is known as passive light. It uses visible light for illumination, which may cause some distraction to users.<sup id="cite_ref-gneo_37-1" class="reference"><a href="#cite_note-gneo-37"><span class="cite-bracket">[</span>37<span class="cite-bracket">]</span></a></sup> Another challenge with this method is that the contrast of the pupil is lower than in the active-light methods, so the center of the <a href="/wiki/Iris_(anatomy)" title="Iris (anatomy)">iris</a> is used for calculating the vector instead.<sup id="cite_ref-39" class="reference"><a href="#cite_note-39"><span class="cite-bracket">[</span>39<span class="cite-bracket">]</span></a></sup> This calculation needs to detect the boundary of the iris and the white <a href="/wiki/Sclera" title="Sclera">sclera</a> (<a href="/wiki/Corneal_limbus" title="Corneal limbus">limbus</a> tracking). Vertical eye movements present a further challenge due to obstruction by the eyelids.<sup id="cite_ref-40" class="reference"><a href="#cite_note-40"><span class="cite-bracket">[</span>40<span class="cite-bracket">]</span></a></sup> </p> <ul class="gallery mw-gallery-packed center"> <li class="gallerybox" style="width: 217.33333333333px"> <div class="thumb" style="width: 215.33333333333px;"><span typeof="mw:File"><a href="/wiki/File:Bright_pupil_by_infrared_or_near_infrared_illumination.jpg" class="mw-file-description" title="Infrared / near-infrared: bright pupil."><img alt="Infrared / near-infrared: bright pupil." src="//upload.wikimedia.org/wikipedia/commons/1/1c/Bright_pupil_by_infrared_or_near_infrared_illumination.jpg" decoding="async" width="216" height="120" class="mw-file-element" data-file-width="281" data-file-height="157" /></a></span></div> <div class="gallerytext">Infrared / near-infrared: bright pupil.</div> </li> <li class="gallerybox" style="width: 220.66666666667px"> <div class="thumb" style="width: 218.66666666667px;"><span typeof="mw:File"><a href="/wiki/File:Dark_pupil_by_infrared_or_near_infrared_illumination.jpg" class="mw-file-description" title="Infrared / near-infrared: dark pupil and corneal reflection."><img alt="Infrared / near-infrared: dark pupil and corneal reflection." src="//upload.wikimedia.org/wikipedia/commons/c/cb/Dark_pupil_by_infrared_or_near_infrared_illumination.jpg" decoding="async" width="219" height="120" class="mw-file-element" data-file-width="282" data-file-height="155" /></a></span></div> <div class="gallerytext">Infrared / near-infrared: dark pupil and corneal reflection.</div> </li> <li class="gallerybox" style="width: 184.66666666667px"> <div class="thumb" style="width: 182.66666666667px;"><span typeof="mw:File"><a href="/wiki/File:Visible_light_eye-tracking_algorithm.jpg" class="mw-file-description" title="Visible light: center of iris (red), corneal reflection (green), and output vector (blue)."><img alt="Visible light: center of iris (red), corneal reflection (green), and output vector (blue)." src="//upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Visible_light_eye-tracking_algorithm.jpg/274px-Visible_light_eye-tracking_algorithm.jpg" decoding="async" width="183" height="120" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Visible_light_eye-tracking_algorithm.jpg/410px-Visible_light_eye-tracking_algorithm.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Visible_light_eye-tracking_algorithm.jpg/547px-Visible_light_eye-tracking_algorithm.jpg 2x" data-file-width="606" data-file-height="399" /></a></span></div> <div class="gallerytext">Visible light: center of iris (red), corneal reflection (green), and output vector (blue).</div> </li> </ul> <p>Eye-tracking setups vary greatly. Some are head-mounted, some require the head to be stable (for example, with a chin rest), and some function remotely and automatically track the head during motion. Most use a sampling rate of at least 30 Hz; while 50/60 Hz is most common, many video-based eye trackers today run at 240, 350 or even 1000/1250 Hz, speeds needed to capture fixational eye movements or to measure saccade dynamics correctly. </p><p>Eye movements are typically divided into <a href="/wiki/Fixation_(visual)" title="Fixation (visual)">fixations</a> and saccades – when the eye gaze pauses in a certain position, and when it moves to another position, respectively. The resulting series of fixations and saccades is called a <a href="/w/index.php?title=Scanpath&action=edit&redlink=1" class="new" title="Scanpath (page does not exist)">scanpath</a>. Smooth pursuit describes the eye following a moving object. Fixational eye movements include <a href="/wiki/Microsaccade" title="Microsaccade">microsaccades</a>: small, involuntary saccades that occur during attempted fixation. Most information from the eye is made available during a fixation or smooth pursuit, but not during a saccade.<sup id="cite_ref-41" class="reference"><a href="#cite_note-41"><span class="cite-bracket">[</span>41<span class="cite-bracket">]</span></a></sup> </p><p>Scanpaths are useful for analyzing cognitive intent, interest, and salience. Other biological factors (some as simple as gender) may affect the scanpath as well. Eye tracking in <a href="/wiki/Human%E2%80%93computer_interaction" title="Human–computer interaction">human–computer interaction</a> (HCI) typically investigates the scanpath for usability purposes, or as a method of input in <a href="/wiki/Gaze-contingency_paradigm" title="Gaze-contingency paradigm">gaze-contingent displays</a>, also known as <a href="/w/index.php?title=Gaze-based_interfaces&action=edit&redlink=1" class="new" title="Gaze-based interfaces (page does not exist)">gaze-based interfaces</a>.<sup id="cite_ref-42" class="reference"><a href="#cite_note-42"><span class="cite-bracket">[</span>42<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Data_presentation">Data presentation</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=7" title="Edit section: Data presentation"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Interpretation of the data that is recorded by the various types of eye-trackers employs a variety of software that animates or visually represents it, so that the visual behavior of one or more users can be graphically summarized. </p>
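<p>Before visualisation, the raw gaze samples are normally segmented into the fixations and saccades described above. The following is a minimal sketch of a velocity-threshold (I-VT-style) segmentation; the threshold and sampling rate are illustrative, and production systems add filtering and merging of nearby fixations.</p> <pre>
import numpy as np

def segment_fixations(x, y, fs=250.0, vel_thresh_deg_s=30.0):
    """Split a gaze trace into fixations using a velocity threshold.

    x, y : gaze position in degrees of visual angle, sampled at fs Hz.
    Returns a list of fixations as (start_s, end_s, centroid_x, centroid_y);
    the gaps between successive fixations are the saccades, and the whole
    ordered sequence forms the scanpath.
    """
    x, y = np.asarray(x, float), np.asarray(y, float)
    vel = np.hypot(np.diff(x), np.diff(y)) * fs        # deg/s per sample
    is_saccade = vel > vel_thresh_deg_s
    fixating = np.concatenate([[True], ~is_saccade])   # label every sample
    fixations, start = [], None
    for i, f in enumerate(fixating):
        if f and start is None:
            start = i                                  # fixation begins
        elif not f and start is not None:
            fixations.append((start / fs, i / fs,
                              x[start:i].mean(), y[start:i].mean()))
            start = None                               # fixation ends
    if start is not None:                              # trailing fixation
        fixations.append((start / fs, len(fixating) / fs,
                          x[start:].mean(), y[start:].mean()))
    return fixations
</pre>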
<p>The video is generally coded manually to identify areas of interest (AOIs) or, more recently, with the help of artificial intelligence. Graphical presentation is rarely the basis of research results, since it is limited in terms of what can be analysed; research relying on eye-tracking, for example, usually requires quantitative measures of the eye movement events and their parameters. The following visualisations are the most commonly used: </p><p><b>Animated representations of a point on the interface</b> This method is used when the visual behavior is examined individually, indicating where the user focused their gaze in each moment, complemented with a small path that indicates the previous saccade movements, as seen in the image. </p><p><b>Static representations of the saccade path</b> This is fairly similar to the one described above, with the difference that this is a static method. A higher level of expertise than with the animated ones is required to interpret this. </p><p><b>Heat maps</b> An alternative static representation, used mainly for the agglomerated analysis of the visual exploration patterns in a group of users. In these representations, the 'hot' zones or zones with higher density designate where the users focused their gaze (not their attention) with a higher frequency. Heat maps are the best known visualization technique for eyetracking studies.<sup id="cite_ref-43" class="reference"><a href="#cite_note-43"><span class="cite-bracket">[</span>43<span class="cite-bracket">]</span></a></sup> </p><p><b>Blind zones maps, or focus maps</b> This method is a simplified version of the heat maps, in which the zones less attended to by the users are displayed clearly, thus allowing for an easier understanding of the most relevant information; that is to say, it provides more information about which zones were not seen by the users. </p><p><b>Saliency maps</b> Similar to heat maps, a saliency map illustrates areas of focus by brightly displaying the attention-grabbing objects over an initially black canvas. The more focus is given to a particular object, the brighter it will appear.<sup id="cite_ref-44" class="reference"><a href="#cite_note-44"><span class="cite-bracket">[</span>44<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Eye-tracking_vs._gaze-tracking">Eye-tracking vs. gaze-tracking</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=8" title="Edit section: Eye-tracking vs. gaze-tracking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Eye-trackers necessarily measure the rotation of the eye with respect to some frame of reference. This is usually tied to the measuring system. Thus, if the measuring system is head-mounted, as with EOG or a video-based system mounted to a helmet, then eye-in-head angles are measured. To deduce the line of sight in world coordinates, the head must be kept in a constant position or its movements must be tracked as well. In these cases, head direction is added to eye-in-head direction to determine gaze direction.
However, if head movement is minor, the eye-in-head direction is a close approximation of the gaze direction.<sup id="cite_ref-45" class="reference"><a href="#cite_note-45"><span class="cite-bracket">[</span>45<span class="cite-bracket">]</span></a></sup> </p><p>If the measuring system is table-mounted, as with scleral search coils or table-mounted camera (remote) systems, then gaze angles are measured directly in world coordinates. Typically, in these situations head movements are prohibited. For example, the head position is fixed using a bite bar or a forehead support. Then a head-centered reference frame is identical to a world-centered reference frame. Or, colloquially, the eye-in-head position directly determines the gaze direction. </p><p>Some results are available on human eye movements under natural conditions where head movements are allowed as well.<sup id="cite_ref-46" class="reference"><a href="#cite_note-46"><span class="cite-bracket">[</span>46<span class="cite-bracket">]</span></a></sup> The relative position of eye and head, even with constant gaze direction, influences neuronal activity in higher visual areas.<sup id="cite_ref-47" class="reference"><a href="#cite_note-47"><span class="cite-bracket">[</span>47<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Practice">Practice</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=9" title="Edit section: Practice"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>A great deal of research has gone into studies of the mechanisms and dynamics of eye rotation, but the goal of eye tracking is most often to estimate gaze direction. Users may be interested in what features of an image draw the eye, for example. The eye tracker does not provide absolute gaze direction, but rather can measure only changes in gaze direction. To determine precisely what a subject is looking at, some calibration procedure is required in which the subject looks at a point or series of points, while the eye tracker records the value that corresponds to each gaze position. (Even those techniques that track features of the retina cannot provide exact gaze direction, because there is no specific anatomical feature that marks the exact point where the visual axis meets the retina, if indeed there is such a single, stable point.) An accurate and reliable calibration is essential for obtaining valid and repeatable eye movement data, and this can be a significant challenge for non-verbal subjects or those who have unstable gaze. </p><p>Each method of eye-tracking has advantages and disadvantages, and the choice of an eye-tracking system depends on considerations of cost and application. There are offline methods and online procedures like <a href="/wiki/AttentionTracking" title="AttentionTracking">AttentionTracking</a>. There is a trade-off between cost and sensitivity, with the most sensitive systems costing many tens of thousands of dollars and requiring considerable expertise to operate properly.
Advances in computer and video technology have led to the development of relatively low-cost systems that are useful for many applications and fairly easy to use.<sup id="cite_ref-48" class="reference"><a href="#cite_note-48"><span class="cite-bracket">[</span>48<span class="cite-bracket">]</span></a></sup> Interpretation of the results still requires some level of expertise, however, because a misaligned or poorly calibrated system can produce wildly erroneous data. </p> <div class="mw-heading mw-heading3"><h3 id="Eye-tracking_while_driving_a_car_in_a_difficult_situation">Eye-tracking while driving a car in a difficult situation</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=10" title="Edit section: Eye-tracking while driving a car in a difficult situation"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <figure class="mw-default-size mw-halign-right" typeof="mw:File/Thumb"><a href="/wiki/File:Eye_movements_of_drivers.jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/en/thumb/2/20/Eye_movements_of_drivers.jpg/330px-Eye_movements_of_drivers.jpg" decoding="async" width="330" height="329" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/2/20/Eye_movements_of_drivers.jpg/495px-Eye_movements_of_drivers.jpg 1.5x, //upload.wikimedia.org/wikipedia/en/2/20/Eye_movements_of_drivers.jpg 2x" data-file-width="534" data-file-height="532" /></a><figcaption>Frames from the narrow-road eye tracking described in this section<sup id="cite_ref-49" class="reference"><a href="#cite_note-49"><span class="cite-bracket">[</span>49<span class="cite-bracket">]</span></a></sup></figcaption></figure> <p>The eye movements of two groups of drivers were filmed with a special head camera by a team at the Swiss Federal Institute of Technology: novice and experienced drivers had their eye movements recorded while approaching a bend on a narrow road. The series of images has been condensed from the original film frames<sup id="cite_ref-50" class="reference"><a href="#cite_note-50"><span class="cite-bracket">[</span>50<span class="cite-bracket">]</span></a></sup> to show two eye fixations per image for better comprehension. </p><p>Each of these stills corresponds to approximately 0.5 seconds in real time. </p><p>The series of images shows an example of eye fixations #9 to #14 of a typical novice and of an experienced driver. </p><p>Comparison of the top images shows that the experienced driver checks the curve and even has fixation No. 9 left over to look aside, while the novice driver needs to check the road and estimate his distance to the parked car. </p><p>In the middle images, the experienced driver is now fully concentrating on the location where an oncoming car could be seen. The novice driver concentrates his view on the parked car. </p><p>In the bottom image the novice is busy estimating the distance between the left wall and the parked car, while the experienced driver can use their <a href="/wiki/Peripheral_vision" title="Peripheral vision">peripheral vision</a> for that and still concentrate vision on the dangerous point of the curve: if a car appears there, the driver has to give way, i.e.
stop to the right instead of passing the parked car.<sup id="cite_ref-51" class="reference"><a href="#cite_note-51"><span class="cite-bracket">[</span>51<span class="cite-bracket">]</span></a></sup> </p><p>More recent studies have also used head-mounted eye tracking to measure eye movements during real-world driving conditions.<sup id="cite_ref-52" class="reference"><a href="#cite_note-52"><span class="cite-bracket">[</span>52<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-bop.unibe.ch_23-1" class="reference"><a href="#cite_note-bop.unibe.ch-23"><span class="cite-bracket">[</span>23<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Eye-tracking_of_younger_and_elderly_people_while_walking">Eye-tracking of younger and elderly people while walking</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=11" title="Edit section: Eye-tracking of younger and elderly people while walking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>While walking, elderly subjects depend more on <a href="/wiki/Fovea_centralis" title="Fovea centralis">foveal</a> vision than do younger subjects. Their walking speed is decreased by a limited <a href="/wiki/Visual_field" title="Visual field">visual field</a>, probably caused by deteriorated peripheral vision. </p><p>Younger subjects make use of both their central and peripheral vision while walking. Their peripheral vision allows faster control over the process of walking.<sup id="cite_ref-53" class="reference"><a href="#cite_note-53"><span class="cite-bracket">[</span>53<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Applications">Applications</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=12" title="Edit section: Applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>A wide variety of disciplines use eye-tracking techniques, including <a href="/wiki/Cognitive_science" title="Cognitive science">cognitive science</a>; <a href="/wiki/Psychology" title="Psychology">psychology</a> (notably <a href="/wiki/Psycholinguistics" title="Psycholinguistics">psycholinguistics</a>; the visual world paradigm); <a href="/wiki/Human-computer_interaction" class="mw-redirect" title="Human-computer interaction">human-computer interaction</a> (HCI); <a href="/wiki/Human_factors_and_ergonomics" class="mw-redirect" title="Human factors and ergonomics">human factors and ergonomics</a>; <a href="/wiki/Marketing_research" title="Marketing research">marketing research</a> and medical research (neurological diagnosis).<sup id="cite_ref-54" class="reference"><a href="#cite_note-54"><span class="cite-bracket">[</span>54<span class="cite-bracket">]</span></a></sup> Specific applications include the tracking of eye movement in <a href="/wiki/Eye_movement_in_language_reading" class="mw-redirect" title="Eye movement in language reading">language reading</a>, <a href="/wiki/Eye_movement_in_music_reading" title="Eye movement in music reading">music reading</a>, human <a href="/wiki/Activity_recognition" title="Activity recognition">activity recognition</a>, the perception of advertising, the playing of sports, distraction detection and <a href="/wiki/Cognitive_load" title="Cognitive load">cognitive load</a> estimation of drivers and pilots, and as a means
of operating computers by people with severe motor impairment.<sup id="cite_ref-bop.unibe.ch_23-2" class="reference"><a href="#cite_note-bop.unibe.ch-23"><span class="cite-bracket">[</span>23<span class="cite-bracket">]</span></a></sup> In the field of virtual reality, eye tracking is used in head mounted displays for a variety of purposes including to reduce processing load by only rendering the graphical area within the user's gaze.<sup id="cite_ref-55" class="reference"><a href="#cite_note-55"><span class="cite-bracket">[</span>55<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Commercial_applications">Commercial applications</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=13" title="Edit section: Commercial applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In recent years, the increased sophistication and accessibility of eye-tracking technologies have generated a great deal of interest in the commercial sector. Applications include <a href="/wiki/Web_usability" title="Web usability">web usability</a>, advertising, sponsorship, package design and automotive engineering. In general, commercial eye-tracking studies function by presenting a target stimulus to a sample of consumers while an eye tracker records eye activity. Examples of target stimuli may include websites, television programs, sporting events, films and commercials, magazines and newspapers, packages, shelf displays, consumer systems (ATMs, checkout systems, kiosks) and software. The resulting data can be statistically analyzed and graphically rendered to provide evidence of specific visual patterns. By examining fixations, <a href="/wiki/Saccades" class="mw-redirect" title="Saccades">saccades</a>, pupil dilation, blinks and a variety of other behaviors, researchers can determine a great deal about the effectiveness of a given medium or product. While some companies complete this type of research internally, there are many private companies that offer eye-tracking services and analysis. </p><p>One field of commercial eye-tracking research is web usability. While traditional usability techniques are often quite powerful in providing information on clicking and scrolling patterns, eye-tracking offers the ability to analyze user interaction between the clicks and how much time a user spends between clicks, thereby providing valuable insight into which features are the most eye-catching, which features cause confusion and which are ignored altogether. Specifically, eye-tracking can be used to assess search efficiency, branding, online advertisements, navigation usability, overall design and many other site components. Analyses may target a prototype or competitor site in addition to the main client site. </p><p>Eye-tracking is commonly used in a variety of different advertising media. Commercials, print ads, online ads and sponsored programs are all conducive to analysis with current eye-tracking technology. One example is the analysis of eye movements over advertisements in the <a href="/wiki/Yellow_pages" title="Yellow pages">Yellow Pages</a>. One study focused on what particular features caused people to notice an ad, whether they viewed ads in a particular order and how viewing times varied. The study revealed that ad size, graphics, color, and copy all influence attention to advertisements. 
Knowing this allows researchers to assess in great detail how often a sample of consumers fixates on the target logo, product or ad. Hence an advertiser can quantify the success of a given campaign in terms of actual visual attention.<sup id="cite_ref-56" class="reference"><a href="#cite_note-56"><span class="cite-bracket">[</span>56<span class="cite-bracket">]</span></a></sup> Another example of this is a study that found that in a <a href="/wiki/Search_engine_results_page" title="Search engine results page">search engine results page</a>, authorship snippets received more attention than the paid ads or even the first organic result.<sup id="cite_ref-57" class="reference"><a href="#cite_note-57"><span class="cite-bracket">[</span>57<span class="cite-bracket">]</span></a></sup> </p><p>Yet another example of commercial eye-tracking research comes from the field of recruitment. A study analyzed how recruiters screen <a href="/wiki/LinkedIn" title="LinkedIn">LinkedIn</a> profiles and presented results as <a href="/wiki/Heat_map" title="Heat map">heat maps</a>.<sup id="cite_ref-58" class="reference"><a href="#cite_note-58"><span class="cite-bracket">[</span>58<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Safety_applications">Safety applications</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=14" title="Edit section: Safety applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Scientists in 2017 constructed a Deep Integrated Neural Network (DINN) out of a Deep Neural Network and a convolutional neural network.<sup id="cite_ref-:0_24-2" class="reference"><a href="#cite_note-:0-24"><span class="cite-bracket">[</span>24<span class="cite-bracket">]</span></a></sup> The goal was to use <a href="/wiki/Deep_learning" title="Deep learning">deep learning</a> to examine images of drivers and determine their level of drowsiness by "classify[ing] eye states." With enough images, the proposed DINN could ideally determine when drivers blink, how often they blink, and for how long. From there, it could judge how tired a given driver appears to be, effectively conducting an eye-tracking exercise. The DINN was trained on data from over 2,400 subjects and correctly diagnosed their states 96%-99.5% of the time. Most other artificial intelligence models performed at rates above 90%.<sup id="cite_ref-:0_24-3" class="reference"><a href="#cite_note-:0-24"><span class="cite-bracket">[</span>24<span class="cite-bracket">]</span></a></sup> This technology could ideally provide another avenue for <a href="/wiki/Driver_drowsiness_detection" title="Driver drowsiness detection">driver drowsiness detection</a>. 
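</p> <p>Downstream of such an eye-state classifier, the bookkeeping is straightforward. The toy sketch below (not the study's actual DINN) turns per-frame eye-state labels into a blink count, blink durations and the fraction of time the eyes are closed, a PERCLOS-style measure of the kind a drowsiness monitor can threshold; the frame rate and function name are illustrative.</p> <pre>
import numpy as np

def blink_statistics(eye_closed, fps=30.0):
    """Summarize per-frame eye-state labels from an eye-state classifier.

    eye_closed : boolean array, True for frames where the eyes are closed.
    fps        : camera frame rate in frames per second.
    Returns (blink_count, mean_blink_duration_s, closure_ratio), where
    closure_ratio is the fraction of time the eyes were closed.
    """
    closed = np.asarray(eye_closed, bool)
    edges = np.diff(closed.astype(int))
    starts = np.flatnonzero(edges == 1) + 1    # open to closed transitions
    ends = np.flatnonzero(edges == -1) + 1     # closed to open transitions
    if closed[0]:
        starts = np.concatenate([[0], starts])        # began mid-blink
    if closed[-1]:
        ends = np.concatenate([ends, [len(closed)]])  # ended mid-blink
    durations = (ends - starts) / fps
    mean_duration = float(durations.mean()) if len(durations) else 0.0
    return len(durations), mean_duration, float(closed.mean())
</pre> <p class="mw-empty-elt">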
</p> <div class="mw-heading mw-heading3"><h3 id="Game_theory_applications">Game theory applications</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=15" title="Edit section: Game theory applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In a 2019 study, a Convolutional Neural Network (CNN) was constructed with the ability to identify individual chess pieces the same way other CNNs can identify facial features.<sup id="cite_ref-:1_26-1" class="reference"><a href="#cite_note-:1-26"><span class="cite-bracket">[</span>26<span class="cite-bracket">]</span></a></sup> It was then fed eye-tracking input data from 30 chess players of various skill levels. With this data, the CNN used gaze estimation to determine parts of the chess board to which a player was paying close attention. It then generated a saliency map to illustrate those parts of the board. Ultimately, the CNN would combine its knowledge of the board and pieces with its saliency map to predict the players' next move. Regardless of the <a href="/wiki/Training_data_set" class="mw-redirect" title="Training data set">training dataset</a> the neural network system was trained upon, it predicted the next move more accurately than if it had selected any possible move at random, and the saliency maps drawn for any given player and situation were more than 54% similar.<sup id="cite_ref-:1_26-2" class="reference"><a href="#cite_note-:1-26"><span class="cite-bracket">[</span>26<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Assistive_technology">Assistive technology</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=16" title="Edit section: Assistive technology"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>People with severe motor impairment can use eye tracking for interacting with computers<sup id="cite_ref-59" class="reference"><a href="#cite_note-59"><span class="cite-bracket">[</span>59<span class="cite-bracket">]</span></a></sup> as it is faster than single switch scanning techniques and intuitive to operate.<sup id="cite_ref-60" class="reference"><a href="#cite_note-60"><span class="cite-bracket">[</span>60<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-61" class="reference"><a href="#cite_note-61"><span class="cite-bracket">[</span>61<span class="cite-bracket">]</span></a></sup> Motor impairment caused by Cerebral Palsy<sup id="cite_ref-62" class="reference"><a href="#cite_note-62"><span class="cite-bracket">[</span>62<span class="cite-bracket">]</span></a></sup> or <a href="/wiki/Amyotrophic_lateral_sclerosis" class="mw-redirect" title="Amyotrophic lateral sclerosis">Amyotrophic lateral sclerosis</a> often affects speech, and users with Severe Speech and Motor Impairment (SSMI) use a type of software known as Augmentative and Alternative Communication (AAC) aid,<sup id="cite_ref-63" class="reference"><a href="#cite_note-63"><span class="cite-bracket">[</span>63<span class="cite-bracket">]</span></a></sup> that displays icons, words and letters on screen<sup id="cite_ref-64" class="reference"><a href="#cite_note-64"><span class="cite-bracket">[</span>64<span class="cite-bracket">]</span></a></sup> and uses text-to-speech software to generate spoken output.<sup id="cite_ref-65" class="reference"><a 
href="#cite_note-65"><span class="cite-bracket">[</span>65<span class="cite-bracket">]</span></a></sup> More recently, researchers have also explored eye tracking to control robotic arms<sup id="cite_ref-66" class="reference"><a href="#cite_note-66"><span class="cite-bracket">[</span>66<span class="cite-bracket">]</span></a></sup> and powered wheelchairs.<sup id="cite_ref-67" class="reference"><a href="#cite_note-67"><span class="cite-bracket">[</span>67<span class="cite-bracket">]</span></a></sup> Eye tracking is also helpful in analysing visual search patterns,<sup id="cite_ref-68" class="reference"><a href="#cite_note-68"><span class="cite-bracket">[</span>68<span class="cite-bracket">]</span></a></sup> detecting the presence of <a href="/wiki/Nystagmus" title="Nystagmus">nystagmus</a> and detecting early signs of learning disability by analysing eye gaze movement during reading.<sup id="cite_ref-69" class="reference"><a href="#cite_note-69"><span class="cite-bracket">[</span>69<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Aviation_applications">Aviation applications</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=17" title="Edit section: Aviation applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Eye tracking has already been studied for flight safety by comparing scan paths and fixation duration to evaluate the progress of pilot trainees,<sup id="cite_ref-70" class="reference"><a href="#cite_note-70"><span class="cite-bracket">[</span>70<span class="cite-bracket">]</span></a></sup> for estimating pilots' skills<sup id="cite_ref-71" class="reference"><a href="#cite_note-71"><span class="cite-bracket">[</span>71<span class="cite-bracket">]</span></a></sup> and for analyzing a crew's joint attention and shared situational awareness.<sup id="cite_ref-72" class="reference"><a href="#cite_note-72"><span class="cite-bracket">[</span>72<span class="cite-bracket">]</span></a></sup> Eye-tracking technology has also been explored for interacting with helmet-mounted display systems<sup id="cite_ref-deReus2012_73-0" class="reference"><a href="#cite_note-deReus2012-73"><span class="cite-bracket">[</span>73<span class="cite-bracket">]</span></a></sup> and multi-functional displays<sup id="cite_ref-74" class="reference"><a href="#cite_note-74"><span class="cite-bracket">[</span>74<span class="cite-bracket">]</span></a></sup> in military aircraft. Studies have investigated the utility of eye trackers for head-up target locking and head-up target acquisition in helmet-mounted display systems (HMDS).<sup id="cite_ref-deReus2012_73-1" class="reference"><a href="#cite_note-deReus2012-73"><span class="cite-bracket">[</span>73<span class="cite-bracket">]</span></a></sup> Pilots' feedback suggested that even though the technology is promising, its hardware and software components have yet to mature.<sup id="cite_ref-deReus2012_73-2" class="reference"><a href="#cite_note-deReus2012-73"><span class="cite-bracket">[</span>73<span class="cite-bracket">]</span></a></sup> Research on interacting with multi-functional displays in a simulator environment showed that eye tracking can improve response times and significantly reduce perceived cognitive load compared with existing systems. Further, research has also investigated utilizing measurements of fixation and pupillary responses to estimate a pilot's cognitive load.
Aviation applications

Eye tracking has already been studied for flight safety: by comparing scan paths and fixation duration to evaluate the progress of pilot trainees,[70] by estimating pilots' skills,[71] and by analyzing a crew's joint attention and shared situational awareness.[72] Eye tracking technology has also been explored for interacting with helmet-mounted display systems[73] and multi-functional displays[74] in military aircraft. Studies have investigated the utility of eye trackers for head-up target locking and head-up target acquisition in helmet-mounted display systems (HMDS).[73] Pilots' feedback suggested that even though the technology is promising, its hardware and software components have yet to mature.[73] Research on interacting with multi-functional displays in a simulator environment showed that eye tracking can significantly improve response times and reduce perceived cognitive load compared with existing systems. Further research has investigated the use of fixation measurements and pupillary responses to estimate a pilot's cognitive load. Estimating cognitive load can help in designing next-generation adaptive cockpits with improved flight safety.[75] Eye tracking is also useful for detecting pilot fatigue.[76][23]
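Pupillary workload measures of the kind mentioned above are commonly computed as the change in pupil diameter relative to a task-free baseline. The sketch below shows one such index, the mean percentage change per time window; the 60 Hz rate, 2 s window and simple mean baseline are illustrative assumptions rather than a standard taken from the cited studies.

```python
import numpy as np

def pupil_workload_index(diam_mm, baseline_mm, fs_hz=60, win_s=2.0):
    """Percent change of pupil diameter from baseline per sliding window.

    diam_mm: 1-D array of pupil diameters recorded during the task (mm).
    baseline_mm: mean diameter recorded during a task-free period.
    Returns one value per non-overlapping window; positive values
    indicate dilation, commonly read as increased cognitive load.
    """
    win = int(fs_hz * win_s)
    n = len(diam_mm) // win
    windows = np.asarray(diam_mm[: n * win]).reshape(n, win)
    return 100.0 * (windows.mean(axis=1) - baseline_mm) / baseline_mm

rng = np.random.default_rng(0)
baseline = 3.5                            # mm, task-free mean (illustrative)
task = 3.5 + 0.3 * rng.random(600)        # 10 s of task data at 60 Hz
print(pupil_workload_index(task, baseline).round(1))
```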
Automotive applications

In recent times, eye tracking technology has been investigated in the automotive domain in both passive and active ways. The National Highway Traffic Safety Administration measured glance durations for undertaking secondary tasks while driving and used them to promote safety by discouraging the introduction of excessively distracting devices in vehicles.[77] In addition to distraction detection, eye tracking is also used to interact with in-vehicle information systems (IVIS).[78] Though initial research[79] investigated the efficacy of eye tracking systems for interaction with head-down displays (HDDs), these still required drivers to take their eyes off the road while performing a secondary task. Recent studies have investigated gaze-controlled interaction with head-up displays (HUDs), which eliminates eyes-off-road distraction.[80] Eye tracking is also used to monitor drivers' cognitive load and detect potential distraction. Though researchers[81] have explored various methods of estimating drivers' cognitive load from physiological parameters, the use of ocular parameters offers a way for existing eye trackers to monitor drivers' cognitive load in addition to supporting interaction with IVIS.[82][83]
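Glance-duration monitoring of the kind applied above reduces to segmenting the gaze signal into on-road and off-road episodes and flagging off-road glances that run too long. Here is a minimal sketch, assuming a pre-classified boolean gaze stream; the 2-second threshold is a commonly cited distraction guideline, used here only as an illustrative default.

```python
def off_road_glances(samples, fs_hz=60, max_glance_s=2.0):
    """Flag off-road glances longer than max_glance_s.

    samples: iterable of booleans, True while gaze is on the roadway.
    Returns a list of (start_s, duration_s) for each over-long glance.
    """
    glances, start = [], None
    for i, on_road in enumerate(samples):
        if not on_road and start is None:
            start = i                        # off-road glance begins
        elif on_road and start is not None:
            dur = (i - start) / fs_hz
            if dur > max_glance_s:
                glances.append((start / fs_hz, dur))
            start = None
    if start is not None:                    # glance still open at the end
        dur = (len(samples) - start) / fs_hz
        if dur > max_glance_s:
            glances.append((start / fs_hz, dur))
    return glances

# 1 s on road, 2.5 s looking at the infotainment screen, 1 s back on road
gaze = [True] * 60 + [False] * 150 + [True] * 60
print(off_road_glances(gaze))                # [(1.0, 2.5)]
```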
</p> <div class="mw-heading mw-heading3"><h3 id="Cartographic_applications">Cartographic applications</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=21" title="Edit section: Cartographic applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><a href="/wiki/Cartography" title="Cartography">Cartographic</a> research has widely adopted eye tracking techniques. Researchers have used them to see how individuals perceive and interpret <a href="/wiki/Map" title="Map">maps</a>.<sup id="cite_ref-88" class="reference"><a href="#cite_note-88"><span class="cite-bracket">[</span>88<span class="cite-bracket">]</span></a></sup> For example, eye tracking has been used to study differences in perception of 2D and 3D visualization,<sup id="cite_ref-89" class="reference"><a href="#cite_note-89"><span class="cite-bracket">[</span>89<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-90" class="reference"><a href="#cite_note-90"><span class="cite-bracket">[</span>90<span class="cite-bracket">]</span></a></sup> comparison of map reading strategies between novices and experts<sup id="cite_ref-91" class="reference"><a href="#cite_note-91"><span class="cite-bracket">[</span>91<span class="cite-bracket">]</span></a></sup> or students and their geography teachers,<sup id="cite_ref-92" class="reference"><a href="#cite_note-92"><span class="cite-bracket">[</span>92<span class="cite-bracket">]</span></a></sup> and evaluation of the cartographic quality of maps.<sup id="cite_ref-93" class="reference"><a href="#cite_note-93"><span class="cite-bracket">[</span>93<span class="cite-bracket">]</span></a></sup> Besides, cartographers have employed eye tracking to investigate various factors affecting map reading, including attributes such as color or symbol density.<sup id="cite_ref-94" class="reference"><a href="#cite_note-94"><span class="cite-bracket">[</span>94<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-95" class="reference"><a href="#cite_note-95"><span class="cite-bracket">[</span>95<span class="cite-bracket">]</span></a></sup> Numerous studies about the usability of map applications took advantage of eye tracking, too.<sup id="cite_ref-96" class="reference"><a href="#cite_note-96"><span class="cite-bracket">[</span>96<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-97" class="reference"><a href="#cite_note-97"><span class="cite-bracket">[</span>97<span class="cite-bracket">]</span></a></sup> </p><p>The cartographic community's daily engagement with visual and spatial data positioned it to contribute significantly to eye tracking data visualization methods and tools.<sup id="cite_ref-:2_98-0" class="reference"><a href="#cite_note-:2-98"><span class="cite-bracket">[</span>98<span class="cite-bracket">]</span></a></sup> For example, cartographers have developed methods for integrating eye tracking data with <a href="/wiki/GIS" class="mw-redirect" title="GIS">GIS</a>, utilizing GIS software for further visualization and analysis.<sup id="cite_ref-99" class="reference"><a href="#cite_note-99"><span class="cite-bracket">[</span>99<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-100" class="reference"><a href="#cite_note-100"><span class="cite-bracket">[</span>100<span class="cite-bracket">]</span></a></sup> The community has also delivered tools for visualizing eye tracking data<sup id="cite_ref-101" class="reference"><a 
href="#cite_note-101"><span class="cite-bracket">[</span>101<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-:2_98-1" class="reference"><a href="#cite_note-:2-98"><span class="cite-bracket">[</span>98<span class="cite-bracket">]</span></a></sup> or a toolbox for the identification of eye fixations based on a spatial component of eye-tracking data.<sup id="cite_ref-102" class="reference"><a href="#cite_note-102"><span class="cite-bracket">[</span>102<span class="cite-bracket">]</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Privacy_concerns">Privacy concerns</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=22" title="Edit section: Privacy concerns"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>With eye tracking projected to become a common feature in various consumer electronics, including smartphones,<sup id="cite_ref-103" class="reference"><a href="#cite_note-103"><span class="cite-bracket">[</span>103<span class="cite-bracket">]</span></a></sup> laptops<sup id="cite_ref-104" class="reference"><a href="#cite_note-104"><span class="cite-bracket">[</span>104<span class="cite-bracket">]</span></a></sup> and virtual reality headsets,<sup id="cite_ref-Rogers_2019_105-0" class="reference"><a href="#cite_note-Rogers_2019-105"><span class="cite-bracket">[</span>105<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-Stein_2020_106-0" class="reference"><a href="#cite_note-Stein_2020-106"><span class="cite-bracket">[</span>106<span class="cite-bracket">]</span></a></sup> concerns have been raised about the technology's impact on consumer privacy.<sup id="cite_ref-107" class="reference"><a href="#cite_note-107"><span class="cite-bracket">[</span>107<span class="cite-bracket">]</span></a></sup><sup id="cite_ref-108" class="reference"><a href="#cite_note-108"><span class="cite-bracket">[</span>108<span class="cite-bracket">]</span></a></sup> With the aid of <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a> techniques, eye tracking data may indirectly reveal information about a user's ethnicity, personality traits, fears, emotions, interests, skills, and physical and mental health condition.<sup id="cite_ref-KrögerLutz2020_109-0" class="reference"><a href="#cite_note-KrögerLutz2020-109"><span class="cite-bracket">[</span>109<span class="cite-bracket">]</span></a></sup> If such inferences are drawn without a user's awareness or approval, this can be classified as an <a href="/wiki/Inference_attack" title="Inference attack">inference attack</a>. Eye activities are not always under volitional control, e.g., "stimulus-driven glances, pupil dilation, ocular tremor, and spontaneous blinks mostly occur without conscious effort, similar to digestion and breathing”.<sup id="cite_ref-KrögerLutz2020_109-1" class="reference"><a href="#cite_note-KrögerLutz2020-109"><span class="cite-bracket">[</span>109<span class="cite-bracket">]</span></a></sup> Therefore, it can be difficult for eye tracking users to estimate or control the amount of information they reveal about themselves. 
</p> <div class="mw-heading mw-heading2"><h2 id="See_also">See also</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=23" title="Edit section: See also"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1184024115">.mw-parser-output .div-col{margin-top:0.3em;column-width:30em}.mw-parser-output .div-col-small{font-size:90%}.mw-parser-output .div-col-rules{column-rule:1px solid #aaa}.mw-parser-output .div-col dl,.mw-parser-output .div-col ol,.mw-parser-output .div-col ul{margin-top:0}.mw-parser-output .div-col li,.mw-parser-output .div-col dd{page-break-inside:avoid;break-inside:avoid-column}</style><div class="div-col" style="column-width: 18em;"> <ul><li><a href="/wiki/Eye_tracking_on_the_ISS" class="mw-redirect" title="Eye tracking on the ISS">Eye tracking on the ISS</a></li> <li><a href="/wiki/Foveated_imaging" title="Foveated imaging">Foveated imaging</a></li> <li><a href="/wiki/Mouse-Tracking" class="mw-redirect" title="Mouse-Tracking">Mouse-Tracking</a></li> <li><a href="/wiki/Screen_reading" title="Screen reading">Screen reading</a></li></ul> </div> <div class="mw-heading mw-heading2"><h2 id="Notes">Notes</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=24" title="Edit section: Notes"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1239543626">.mw-parser-output .reflist{margin-bottom:0.5em;list-style-type:decimal}@media screen{.mw-parser-output .reflist{font-size:90%}}.mw-parser-output .reflist .references{font-size:100%;margin-bottom:0;list-style-type:inherit}.mw-parser-output .reflist-columns-2{column-width:30em}.mw-parser-output .reflist-columns-3{column-width:25em}.mw-parser-output .reflist-columns{margin-top:0.3em}.mw-parser-output .reflist-columns ol{margin-top:0}.mw-parser-output .reflist-columns li{page-break-inside:avoid;break-inside:avoid-column}.mw-parser-output .reflist-upper-alpha{list-style-type:upper-alpha}.mw-parser-output .reflist-upper-roman{list-style-type:upper-roman}.mw-parser-output .reflist-lower-alpha{list-style-type:lower-alpha}.mw-parser-output .reflist-lower-greek{list-style-type:lower-greek}.mw-parser-output .reflist-lower-roman{list-style-type:lower-roman}</style><div class="reflist"> <div class="mw-references-wrap mw-references-columns"><ol class="references"> <li id="cite_note-1"><span class="mw-cite-backlink"><b><a href="#cite_ref-1">^</a></b></span> <span class="reference-text">Reported in <a href="#CITEREFHuey1908/1968">Huey & 1908/1968</a></span> </li> <li id="cite_note-2"><span class="mw-cite-backlink"><b><a href="#cite_ref-2">^</a></b></span> <span class="reference-text"><style data-mw-deduplicate="TemplateStyles:r1238218222">.mw-parser-output cite.citation{font-style:inherit;word-wrap:break-word}.mw-parser-output .citation q{quotes:"\"""\"""'""'"}.mw-parser-output .citation:target{background-color:rgba(0,127,255,0.133)}.mw-parser-output .id-lock-free.id-lock-free a{background:url("//upload.wikimedia.org/wikipedia/commons/6/65/Lock-green.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-limited.id-lock-limited a,.mw-parser-output .id-lock-registration.id-lock-registration a{background:url("//upload.wikimedia.org/wikipedia/commons/d/d6/Lock-gray-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output 
.id-lock-subscription.id-lock-subscription a{background:url("//upload.wikimedia.org/wikipedia/commons/a/aa/Lock-red-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .cs1-ws-icon a{background:url("//upload.wikimedia.org/wikipedia/commons/4/4c/Wikisource-logo.svg")right 0.1em center/12px no-repeat}body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-free a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-limited a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-registration a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-subscription a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .cs1-ws-icon a{background-size:contain;padding:0 1em 0 0}.mw-parser-output .cs1-code{color:inherit;background:inherit;border:none;padding:inherit}.mw-parser-output .cs1-hidden-error{display:none;color:var(--color-error,#d33)}.mw-parser-output .cs1-visible-error{color:var(--color-error,#d33)}.mw-parser-output .cs1-maint{display:none;color:#085;margin-left:0.3em}.mw-parser-output .cs1-kern-left{padding-left:0.2em}.mw-parser-output .cs1-kern-right{padding-right:0.2em}.mw-parser-output .citation .mw-selflink{font-weight:inherit}@media screen{.mw-parser-output .cs1-format{font-size:95%}html.skin-theme-clientpref-night .mw-parser-output .cs1-maint{color:#18911f}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .cs1-maint{color:#18911f}}</style><cite id="CITEREFHuey1908/1968" class="citation book cs1">Huey, Edmund (1968) [originally published 1908]. <i>The Psychology and Pedagogy of Reading</i> (Reprint ed.). MIT Press.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=book&rft.btitle=The+Psychology+and+Pedagogy+of+Reading&rft.edition=Reprint&rft.pub=MIT+Press&rft.date=1968&rft.aulast=Huey&rft.aufirst=Edmund&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-3"><span class="mw-cite-backlink"><b><a href="#cite_ref-3">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBuswell1922" class="citation magazine cs1">Buswell, G.T. (1922). "Fundamental reading habits: a study of their development". <i>Supplementary Educational Monographs</i>. No. 21. Chicago: University of Chicago.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Supplementary+Educational+Monographs&rft.atitle=Fundamental+reading+habits%3A+a+study+of+their+development&rft.issue=21&rft.date=1922&rft.aulast=Buswell&rft.aufirst=G.T.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-4"><span class="mw-cite-backlink"><b><a href="#cite_ref-4">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBuswell1937" class="citation magazine cs1">Buswell, G.T. (1937). "How adults read". <i>Supplementary Educational Monographs</i>. No. 45. 
Chicago: University of Chicago.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Supplementary+Educational+Monographs&rft.atitle=How+adults+read&rft.issue=45&rft.date=1937&rft.aulast=Buswell&rft.aufirst=G.T.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-5"><span class="mw-cite-backlink"><b><a href="#cite_ref-5">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBuswell1935" class="citation cs2">Buswell, G.T. (1935), <i>How people look at pictures: a study of the psychology and perception in art</i>, University of Chicago Press, <a href="/wiki/Trove" title="Trove">Trove</a> <a rel="nofollow" class="external text" href="https://trove.nla.gov.au/work/12223957">12223957</a></cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=book&rft.btitle=How+people+look+at+pictures%3A+a+study+of+the+psychology+and+perception+in+art&rft.pub=University+of+Chicago+Press&rft.date=1935&rft.aulast=Buswell&rft.aufirst=G.T.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-6"><span class="mw-cite-backlink"><b><a href="#cite_ref-6">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFYarbus1967" class="citation book cs1">Yarbus, Alfred L. (1967). <a rel="nofollow" class="external text" href="http://wexler.free.fr/library/files/yarbus%20%281967%29%20eye%20movements%20and%20vision.pdf"><i>Eye movements and vision</i></a> <span class="cs1-format">(PDF)</span>. New York: Plenum Press. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-1-4899-5379-7" title="Special:BookSources/978-1-4899-5379-7"><bdi>978-1-4899-5379-7</bdi></a><span class="reference-accessdate">. Retrieved <span class="nowrap">24 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=book&rft.btitle=Eye+movements+and+vision&rft.place=New+York&rft.pub=Plenum+Press&rft.date=1967&rft.isbn=978-1-4899-5379-7&rft.aulast=Yarbus&rft.aufirst=Alfred+L.&rft_id=http%3A%2F%2Fwexler.free.fr%2Flibrary%2Ffiles%2Fyarbus%2520%25281967%2529%2520eye%2520movements%2520and%2520vision.pdf&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-Yarbus_1967_190-7"><span class="mw-cite-backlink">^ <a href="#cite_ref-Yarbus_1967_190_7-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Yarbus_1967_190_7-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><a href="#CITEREFYarbus1967">Yarbus 1967</a>, p. 190</span> </li> <li id="cite_note-8"><span class="mw-cite-backlink"><b><a href="#cite_ref-8">^</a></b></span> <span class="reference-text"><a href="#CITEREFYarbus1967">Yarbus 1967</a>, p. 194</span> </li> <li id="cite_note-9"><span class="mw-cite-backlink"><b><a href="#cite_ref-9">^</a></b></span> <span class="reference-text"><a href="#CITEREFYarbus1967">Yarbus 1967</a>, p. 191</span> </li> <li id="cite_note-10"><span class="mw-cite-backlink"><b><a href="#cite_ref-10">^</a></b></span> <span class="reference-text"><a href="#CITEREFYarbus1967">Yarbus 1967</a>, p. 
193</span> </li> <li id="cite_note-forsch1e.htm-11"><span class="mw-cite-backlink">^ <a href="#cite_ref-forsch1e.htm_11-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-forsch1e.htm_11-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="http://www.learning-systems.ch/multimedia/forsch1e.htm">"Visual Perception: Eye Movements in Problem Solving"</a>. <i>www.learning-systems.ch</i>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=www.learning-systems.ch&rft.atitle=Visual+Perception%3A+Eye+Movements+in+Problem+Solving.&rft_id=http%3A%2F%2Fwww.learning-systems.ch%2Fmultimedia%2Fforsch1e.htm&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-12"><span class="mw-cite-backlink"><b><a href="#cite_ref-12">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external autonumber" href="http://www.learning-systems.ch/multimedia/eye%20movements%20problem%20solving.swf">[1]</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20110706235011/http://www.learning-systems.ch/multimedia/eye%20movements%20problem%20solving.swf">Archived</a> 2011-07-06 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a></span> </li> <li id="cite_note-13"><span class="mw-cite-backlink"><b><a href="#cite_ref-13">^</a></b></span> <span class="reference-text"><a href="#CITEREFRayner1978">Rayner 1978</a></span> </li> <li id="cite_note-14"><span class="mw-cite-backlink"><b><a href="#cite_ref-14">^</a></b></span> <span class="reference-text"><a href="#CITEREFJustCarpenter1980">Just & Carpenter 1980</a></span> </li> <li id="cite_note-15"><span class="mw-cite-backlink"><b><a href="#cite_ref-15">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPosner1980" class="citation journal cs1">Posner, Michael I. (1980). "Orienting of Attention". <i>Quarterly Journal of Experimental Psychology</i>. <b>32</b> (1). SAGE Publications: 3–25. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F00335558008248231">10.1080/00335558008248231</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0033-555X">0033-555X</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/7367577">7367577</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:2842391">2842391</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Quarterly+Journal+of+Experimental+Psychology&rft.atitle=Orienting+of+Attention&rft.volume=32&rft.issue=1&rft.pages=3-25&rft.date=1980&rft.issn=0033-555X&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A2842391%23id-name%3DS2CID&rft_id=info%3Apmid%2F7367577&rft_id=info%3Adoi%2F10.1080%2F00335558008248231&rft.aulast=Posner&rft.aufirst=Michael+I.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-16"><span class="mw-cite-backlink"><b><a href="#cite_ref-16">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWrightWard2008" class="citation book cs1">Wright, R.D.; Ward, L.M. (2008). <a rel="nofollow" class="external text" href="https://books.google.com/books?id=b_HSJKidixAC"><i>Orienting of Attention</i></a>. Oxford University Press. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-0-19-802997-7" title="Special:BookSources/978-0-19-802997-7"><bdi>978-0-19-802997-7</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=book&rft.btitle=Orienting+of+Attention&rft.pub=Oxford+University+Press&rft.date=2008&rft.isbn=978-0-19-802997-7&rft.aulast=Wright&rft.aufirst=R.D.&rft.au=Ward%2C+L.M.&rft_id=https%3A%2F%2Fbooks.google.com%2Fbooks%3Fid%3Db_HSJKidixAC&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-10.1.1.100.445-17"><span class="mw-cite-backlink">^ <a href="#cite_ref-10.1.1.100.445_17-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-10.1.1.100.445_17-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-10.1.1.100.445_17-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRobert_J._K._JacobKeith_S._Karn2003" class="citation book cs1">Robert J. K. Jacob; Keith S. Karn (2003). <span class="id-lock-registration" title="Free registration required"><a rel="nofollow" class="external text" href="https://archive.org/details/mindseyecognitiv0000unse">"Eye Tracking in Human–Computer Interaction and Usability Research: Ready to Deliver the Promises"</a></span>. In Hyona; Radach; Deubel (eds.). <i>The Mind's Eye: Cognitive and Applied Aspects of Eye Movement Research</i>. Oxford, England: Elsevier Science BV. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.100.445">10.1.1.100.445</a></span>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/0-444-51020-6" title="Special:BookSources/0-444-51020-6"><bdi>0-444-51020-6</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=Eye+Tracking+in+Human%E2%80%93Computer+Interaction+and+Usability+Research%3A+Ready+to+Deliver+the+Promises&rft.btitle=The+Mind%27s+Eye%3A+Cognitive+and+Applied+Aspects+of+Eye+Movement+Research&rft.place=Oxford%2C+England&rft.pub=Elsevier+Science+BV&rft.date=2003&rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.100.445%23id-name%3DCiteSeerX&rft.isbn=0-444-51020-6&rft.au=Robert+J.+K.+Jacob&rft.au=Keith+S.+Karn&rft_id=https%3A%2F%2Farchive.org%2Fdetails%2Fmindseyecognitiv0000unse&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-18"><span class="mw-cite-backlink"><b><a href="#cite_ref-18">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSchiesslDudaThölkeFischer" class="citation web cs1">Schiessl, Michael; Duda, Sabrina; Thölke, Andreas; Fischer, Rico. <a rel="nofollow" class="external text" href="http://www.mmi-interaktiv.de/uploads/media/MMI-Interaktiv0303_SchiesslDudaThoelkeFischer.pdf">"Eye tracking and its application in usability and media research"</a> <span class="cs1-format">(PDF)</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=unknown&rft.btitle=Eye+tracking+and+its+application+in+usability+and+media+research&rft.aulast=Schiessl&rft.aufirst=Michael&rft.au=Duda%2C+Sabrina&rft.au=Th%C3%B6lke%2C+Andreas&rft.au=Fischer%2C+Rico&rft_id=http%3A%2F%2Fwww.mmi-interaktiv.de%2Fuploads%2Fmedia%2FMMI-Interaktiv0303_SchiesslDudaThoelkeFischer.pdf&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-19"><span class="mw-cite-backlink"><b><a href="#cite_ref-19">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHoffman2016" class="citation book cs1">Hoffman, James E. (2016). <a rel="nofollow" class="external text" href="https://books.google.com/books?id=Yep5CwAAQBAJ&pg=PA119">"Visual attention and eye movements"</a>. In Pashler, H. (ed.). <i>Attention</i>. Studies in Cognition. Taylor & Francis. pp. 119–153. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-1-317-71549-8" title="Special:BookSources/978-1-317-71549-8"><bdi>978-1-317-71549-8</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=Visual+attention+and+eye+movements&rft.btitle=Attention&rft.series=Studies+in+Cognition&rft.pages=119-153&rft.pub=Taylor+%26+Francis&rft.date=2016&rft.isbn=978-1-317-71549-8&rft.aulast=Hoffman&rft.aufirst=James+E.&rft_id=https%3A%2F%2Fbooks.google.com%2Fbooks%3Fid%3DYep5CwAAQBAJ%26pg%3DPA119&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-20"><span class="mw-cite-backlink"><b><a href="#cite_ref-20">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDeubel1996" class="citation journal cs1">Deubel, Heiner (1996). 
<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2F0042-6989%2895%2900294-4">"Saccade target selection and object recognition: Evidence for a common attentional mechanism"</a>. <i>Vision Research</i>. <b>36</b> (12): 1827–1837. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1016%2F0042-6989%2895%2900294-4">10.1016/0042-6989(95)00294-4</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/8759451">8759451</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:16916037">16916037</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Vision+Research&rft.atitle=Saccade+target+selection+and+object+recognition%3A+Evidence+for+a+common+attentional+mechanism&rft.volume=36&rft.issue=12&rft.pages=1827-1837&rft.date=1996&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A16916037%23id-name%3DS2CID&rft_id=info%3Apmid%2F8759451&rft_id=info%3Adoi%2F10.1016%2F0042-6989%2895%2900294-4&rft.aulast=Deubel&rft.aufirst=Heiner&rft_id=https%3A%2F%2Fdoi.org%2F10.1016%252F0042-6989%252895%252900294-4&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-21"><span class="mw-cite-backlink"><b><a href="#cite_ref-21">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHolsanova2007" class="citation book cs1 cs1-prop-foreign-lang-source">Holsanova, Jana (2007). <a rel="nofollow" class="external text" href="https://www.researchgate.net/publication/237665654">"Användares interaktion med multimodala texter"</a> [User interaction with multimodal texts]. In L. Gunnarsson; A.-M. Karlsson (eds.). <i>Ett vidgat textbegrepp</i> (in Swedish). pp. 41–58.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=Anv%C3%A4ndares+interaktion+med+multimodala+texter&rft.btitle=Ett+vidgat+textbegrepp&rft.pages=41-58&rft.date=2007&rft.aulast=Holsanova&rft.aufirst=Jana&rft_id=https%3A%2F%2Fwww.researchgate.net%2Fpublication%2F237665654&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-22"><span class="mw-cite-backlink"><b><a href="#cite_ref-22">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCognolatoAtzoriMüller2018" class="citation journal cs1">Cognolato M, Atzori M, Müller H (2018). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6453044">"Head-mounted eye gaze tracking devices: An overview of modern devices and recent advances"</a>. <i>Journal of Rehabilitation and Assistive Technologies Engineering</i>. <b>5</b>: 205566831877399. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F2055668318773991">10.1177/2055668318773991</a>. 
<a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6453044">6453044</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/31191938">31191938</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Rehabilitation+and+Assistive+Technologies+Engineering&rft.atitle=Head-mounted+eye+gaze+tracking+devices%3A+An+overview+of+modern+devices+and+recent+advances.&rft.volume=5&rft.pages=205566831877399&rft.date=2018&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6453044%23id-name%3DPMC&rft_id=info%3Apmid%2F31191938&rft_id=info%3Adoi%2F10.1177%2F2055668318773991&rft.aulast=Cognolato&rft.aufirst=M&rft.au=Atzori%2C+M&rft.au=M%C3%BCller%2C+H&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6453044&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-bop.unibe.ch-23"><span class="mw-cite-backlink">^ <a href="#cite_ref-bop.unibe.ch_23-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-bop.unibe.ch_23-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-bop.unibe.ch_23-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-bop.unibe.ch_23-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFAlexanderMacknikMartinez-Conde2020" class="citation journal cs1">Alexander, Robert; Macknik, Stephen; Martinez-Conde, Susana (2020). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7962687">"Microsaccades in applied environments: Real-world applications of fixational eye movement measurements"</a>. <i>Journal of Eye Movement Research</i>. <b>12</b> (6). <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.16910%2Fjemr.12.6.15">10.16910/jemr.12.6.15</a></span>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7962687">7962687</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/33828760">33828760</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Eye+Movement+Research&rft.atitle=Microsaccades+in+applied+environments%3A+Real-world+applications+of+fixational+eye+movement+measurements&rft.volume=12&rft.issue=6&rft.date=2020&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7962687%23id-name%3DPMC&rft_id=info%3Apmid%2F33828760&rft_id=info%3Adoi%2F10.16910%2Fjemr.12.6.15&rft.aulast=Alexander&rft.aufirst=Robert&rft.au=Macknik%2C+Stephen&rft.au=Martinez-Conde%2C+Susana&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7962687&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-:0-24"><span class="mw-cite-backlink">^ <a href="#cite_ref-:0_24-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:0_24-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:0_24-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-:0_24-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFZhaoWangZhangQi2017" class="citation journal cs1">Zhao, Lei; Wang, Zengcai; Zhang, Guoxin; Qi, Yazhou; Wang, Xiaojin (15 November 2017). "Eye state recognition based on deep integrated neural network and transfer learning". <i>Multimedia Tools and Applications</i>. <b>77</b> (15): 19415–19438. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs11042-017-5380-8">10.1007/s11042-017-5380-8</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1380-7501">1380-7501</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:20691291">20691291</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Multimedia+Tools+and+Applications&rft.atitle=Eye+state+recognition+based+on+deep+integrated+neural+network+and+transfer+learning&rft.volume=77&rft.issue=15&rft.pages=19415-19438&rft.date=2017-11-15&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A20691291%23id-name%3DS2CID&rft.issn=1380-7501&rft_id=info%3Adoi%2F10.1007%2Fs11042-017-5380-8&rft.aulast=Zhao&rft.aufirst=Lei&rft.au=Wang%2C+Zengcai&rft.au=Zhang%2C+Guoxin&rft.au=Qi%2C+Yazhou&rft.au=Wang%2C+Xiaojin&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-25"><span class="mw-cite-backlink"><b><a href="#cite_ref-25">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFStemberCelikKrupinskiChang2019" class="citation journal cs1">Stember, J. N.; Celik, H.; Krupinski, E.; Chang, P. D.; Mutasa, S.; Wood, B. J.; Lignelli, A.; Moonis, G.; Schwartz, L. H.; Jambawalikar, S.; Bagci, U. (August 2019). 
<a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6646645">"Eye Tracking for Deep Learning Segmentation Using Convolutional Neural Networks"</a>. <i>Journal of Digital Imaging</i>. <b>32</b> (4): 597–604. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10278-019-00220-4">10.1007/s10278-019-00220-4</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0897-1889">0897-1889</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6646645">6646645</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/31044392">31044392</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Digital+Imaging&rft.atitle=Eye+Tracking+for+Deep+Learning+Segmentation+Using+Convolutional+Neural+Networks&rft.volume=32&rft.issue=4&rft.pages=597-604&rft.date=2019-08&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6646645%23id-name%3DPMC&rft.issn=0897-1889&rft_id=info%3Apmid%2F31044392&rft_id=info%3Adoi%2F10.1007%2Fs10278-019-00220-4&rft.aulast=Stember&rft.aufirst=J.+N.&rft.au=Celik%2C+H.&rft.au=Krupinski%2C+E.&rft.au=Chang%2C+P.+D.&rft.au=Mutasa%2C+S.&rft.au=Wood%2C+B.+J.&rft.au=Lignelli%2C+A.&rft.au=Moonis%2C+G.&rft.au=Schwartz%2C+L.+H.&rft.au=Jambawalikar%2C+S.&rft.au=Bagci%2C+U.&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6646645&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-:1-26"><span class="mw-cite-backlink">^ <a href="#cite_ref-:1_26-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:1_26-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:1_26-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLouedecGuntzCrowleyVaufreydaz2019" class="citation book cs1">Louedec, Justin Le; Guntz, Thomas; Crowley, James L.; Vaufreydaz, Dominique (2019). "Deep learning investigation for chess player attention prediction using eye-tracking and game data". <i>Proceedings of the 11th ACM Symposium on Eye Tracking Research & Applications</i>. New York, New York, USA: ACM Press. pp. 1–9. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1904.08155">1904.08155</a></span>. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2019arXiv190408155L">2019arXiv190408155L</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3314111.3319827">10.1145/3314111.3319827</a>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-1-4503-6709-7" title="Special:BookSources/978-1-4503-6709-7"><bdi>978-1-4503-6709-7</bdi></a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:118688325">118688325</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=Deep+learning+investigation+for+chess+player+attention+prediction+using+eye-tracking+and+game+data&rft.btitle=Proceedings+of+the+11th+ACM+Symposium+on+Eye+Tracking+Research+%26+Applications&rft.place=New+York%2C+New+York%2C+USA&rft.pages=1-9&rft.pub=ACM+Press&rft.date=2019&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A118688325%23id-name%3DS2CID&rft_id=info%3Abibcode%2F2019arXiv190408155L&rft_id=info%3Aarxiv%2F1904.08155&rft_id=info%3Adoi%2F10.1145%2F3314111.3319827&rft.isbn=978-1-4503-6709-7&rft.aulast=Louedec&rft.aufirst=Justin+Le&rft.au=Guntz%2C+Thomas&rft.au=Crowley%2C+James+L.&rft.au=Vaufreydaz%2C+Dominique&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-27"><span class="mw-cite-backlink"><b><a href="#cite_ref-27">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNadu2015" class="citation journal cs1">Nadu, T (2015). "A review: Towards quality improvement in real time eye-tracking and gaze detection". <i>International Journal of Applied Engineering Research</i>. <b>10</b> (6).</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=International+Journal+of+Applied+Engineering+Research&rft.atitle=A+review%3A+Towards+quality+improvement+in+real+time+eye-tracking+and+gaze+detection&rft.volume=10&rft.issue=6&rft.date=2015&rft.aulast=Nadu&rft.aufirst=T&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-28"><span class="mw-cite-backlink"><b><a href="#cite_ref-28">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNückles2021" class="citation journal cs1">Nückles, M (2021). <a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10648-020-09567-5">"Investigating visual perception in teaching and learning with advanced eye-tracking methodologies: Rewards and challenges of an innovative research paradigm"</a>. <i>Educational Psychology Review</i>. <b>33</b> (1): 149–167. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10648-020-09567-5">10.1007/s10648-020-09567-5</a></span>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:225345884">225345884</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Educational+Psychology+Review&rft.atitle=Investigating+visual+perception+in+teaching+and+learning+with+advanced+eye-tracking+methodologies%3A+Rewards+and+challenges+of+an+innovative+research+paradigm&rft.volume=33&rft.issue=1&rft.pages=149-167&rft.date=2021&rft_id=info%3Adoi%2F10.1007%2Fs10648-020-09567-5&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A225345884%23id-name%3DS2CID&rft.aulast=N%C3%BCckles&rft.aufirst=M&rft_id=https%3A%2F%2Fdoi.org%2F10.1007%252Fs10648-020-09567-5&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-29"><span class="mw-cite-backlink"><b><a href="#cite_ref-29">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFAlexanderWaiteMacknikMartinez-Conde2020" class="citation journal cs1">Alexander, RG; Waite, S; Macknik, SL; Martinez-Conde, S (2020). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7571277">"What do radiologists look for? Advances and limitations of perceptual learning in radiologic search"</a>. <i>Journal of Vision</i>. <b>20</b> (10): 17. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1167%2Fjov.20.10.17">10.1167/jov.20.10.17</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7571277">7571277</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/33057623">33057623</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Vision&rft.atitle=What+do+radiologists+look+for%3F+Advances+and+limitations+of+perceptual+learning+in+radiologic+search.&rft.volume=20&rft.issue=10&rft.pages=17&rft.date=2020&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7571277%23id-name%3DPMC&rft_id=info%3Apmid%2F33057623&rft_id=info%3Adoi%2F10.1167%2Fjov.20.10.17&rft.aulast=Alexander&rft.aufirst=RG&rft.au=Waite%2C+S&rft.au=Macknik%2C+SL&rft.au=Martinez-Conde%2C+S&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7571277&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-30"><span class="mw-cite-backlink"><b><a href="#cite_ref-30">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRobinson1963" class="citation journal cs1">Robinson, David A. (October 1963). "A Method of Measuring Eye Movemnent Using a Scieral Search Coil in a Magnetic Field". <i>IEEE Transactions on Bio-medical Electronics</i>. <b>10</b> (4). Institute of Electrical and Electronics Engineers: 137–145. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2Ftbmel.1963.4322822">10.1109/tbmel.1963.4322822</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0096-0616">0096-0616</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/14121113">14121113</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=IEEE+Transactions+on+Bio-medical+Electronics&rft.atitle=A+Method+of+Measuring+Eye+Movemnent+Using+a+Scieral+Search+Coil+in+a+Magnetic+Field&rft.volume=10&rft.issue=4&rft.pages=137-145&rft.date=1963-10&rft.issn=0096-0616&rft_id=info%3Apmid%2F14121113&rft_id=info%3Adoi%2F10.1109%2Ftbmel.1963.4322822&rft.aulast=Robinson&rft.aufirst=David+A.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-31"><span class="mw-cite-backlink"><b><a href="#cite_ref-31">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCraneSteele,_C.M.1985" class="citation journal cs1">Crane, H.D.; Steele, C.M. (1985). "Generation-V dual-Purkinje-image eyetracker". <i>Applied Optics</i>. <b>24</b> (4): 527–537. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/1985ApOpt..24..527C">1985ApOpt..24..527C</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1364%2FAO.24.000527">10.1364/AO.24.000527</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/18216982">18216982</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:10595433">10595433</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Applied+Optics&rft.atitle=Generation-V+dual-Purkinje-image+eyetracker&rft.volume=24&rft.issue=4&rft.pages=527-537&rft.date=1985&rft_id=info%3Adoi%2F10.1364%2FAO.24.000527&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A10595433%23id-name%3DS2CID&rft_id=info%3Apmid%2F18216982&rft_id=info%3Abibcode%2F1985ApOpt..24..527C&rft.aulast=Crane&rft.aufirst=H.D.&rft.au=Steele%2C+C.M.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-32"><span class="mw-cite-backlink"><b><a href="#cite_ref-32">^</a></b></span> <span class="reference-text">Elbert, T., Lutzenberger, W., Rockstroh, B., Birbaumer, N., 1985. Removal of ocular artifacts from the EEG. A biophysical approach to the EOG. 
Electroencephalogr Clin Neurophysiol 60, 455-463.</span> </li> <li id="cite_note-33"><span class="mw-cite-backlink"><b><a href="#cite_ref-33">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKerenYuval-GreenbergDeouell2010" class="citation journal cs1">Keren, A.S.; Yuval-Greenberg, S.; Deouell, L.Y. (2010). "Saccadic spike potentials in gamma-band EEG: Characterization, detection and suppression". <i>NeuroImage</i>. <b>49</b> (3): 2248–2263. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.neuroimage.2009.10.057">10.1016/j.neuroimage.2009.10.057</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/19874901">19874901</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:7106696">7106696</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=NeuroImage&rft.atitle=Saccadic+spike+potentials+in+gamma-band+EEG%3A+Characterization%2C+detection+and+suppression&rft.volume=49&rft.issue=3&rft.pages=2248-2263&rft.date=2010&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A7106696%23id-name%3DS2CID&rft_id=info%3Apmid%2F19874901&rft_id=info%3Adoi%2F10.1016%2Fj.neuroimage.2009.10.057&rft.aulast=Keren&rft.aufirst=A.S.&rft.au=Yuval-Greenberg%2C+S.&rft.au=Deouell%2C+L.Y.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-34"><span class="mw-cite-backlink"><b><a href="#cite_ref-34">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBullingRoggen,_D.Tröster,_G.2009" class="citation journal cs1">Bulling, A.; Roggen, D.; Tröster, G. (2009). "Wearable EOG goggles: Seamless sensing and context-awareness in everyday environments". <i>Journal of Ambient Intelligence and Smart Environments</i>. <b>1</b> (2): 157–171. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.3233%2FAIS-2009-0020">10.3233/AIS-2009-0020</a>. <a href="/wiki/Hdl_(identifier)" class="mw-redirect" title="Hdl (identifier)">hdl</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://hdl.handle.net/20.500.11850%2F352886">20.500.11850/352886</a></span>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:18423163">18423163</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Ambient+Intelligence+and+Smart+Environments&rft.atitle=Wearable+EOG+goggles%3A+Seamless+sensing+and+context-awareness+in+everyday+environments&rft.volume=1&rft.issue=2&rft.pages=157-171&rft.date=2009&rft_id=info%3Ahdl%2F20.500.11850%2F352886&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A18423163%23id-name%3DS2CID&rft_id=info%3Adoi%2F10.3233%2FAIS-2009-0020&rft.aulast=Bulling&rft.aufirst=A.&rft.au=Roggen%2C+D.&rft.au=Tr%C3%B6ster%2C+G.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-35"><span class="mw-cite-backlink"><b><a href="#cite_ref-35">^</a></b></span> <span class="reference-text">Sopic, D., Aminifar, A., & Atienza, D. (2018). e-glass: A wearable system for real-time detection of epileptic seizures. In IEEE International Symposium on Circuits and Systems (ISCAS).</span> </li> <li id="cite_note-36"><span class="mw-cite-backlink"><b><a href="#cite_ref-36">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWitzner_HansenQiang_Ji2010" class="citation journal cs1">Witzner Hansen, Dan; Qiang Ji (March 2010). <a rel="nofollow" class="external text" href="http://dl.acm.org/citation.cfm?id=1729561">"In the Eye of the Beholder: A Survey of Models for Eyes and Gaze"</a>. <i>IEEE Trans. Pattern Anal. Mach. Intell</i>. <b>32</b> (3): 478–500. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2Ftpami.2009.30">10.1109/tpami.2009.30</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/20075473">20075473</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:16489508">16489508</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=IEEE+Trans.+Pattern+Anal.+Mach.+Intell.&rft.atitle=In+the+Eye+of+the+Beholder%3A+A+Survey+of+Models+for+Eyes+and+Gaze&rft.volume=32&rft.issue=3&rft.pages=478-500&rft.date=2010-03&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A16489508%23id-name%3DS2CID&rft_id=info%3Apmid%2F20075473&rft_id=info%3Adoi%2F10.1109%2Ftpami.2009.30&rft.aulast=Witzner+Hansen&rft.aufirst=Dan&rft.au=Qiang+Ji&rft_id=http%3A%2F%2Fdl.acm.org%2Fcitation.cfm%3Fid%3D1729561&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-gneo-37"><span class="mw-cite-backlink">^ <a href="#cite_ref-gneo_37-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-gneo_37-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGneoSchmidConfortoD’Alessio2012" class="citation journal cs1">Gneo, Massimo; Schmid, Maurizio; Conforto, Silvia; D’Alessio, Tommaso (2012). 
<a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3543256">"A free geometry model-independent neural eye-gaze tracking system"</a>. <i>Journal of NeuroEngineering and Rehabilitation</i>. <b>9</b> (1): 82. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1186%2F1743-0003-9-82">10.1186/1743-0003-9-82</a></span>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3543256">3543256</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/23158726">23158726</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+NeuroEngineering+and+Rehabilitation&rft.atitle=A+free+geometry+model-independent+neural+eye-gaze+tracking+system&rft.volume=9&rft.issue=1&rft.pages=82&rft.date=2012&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC3543256%23id-name%3DPMC&rft_id=info%3Apmid%2F23158726&rft_id=info%3Adoi%2F10.1186%2F1743-0003-9-82&rft.aulast=Gneo&rft.aufirst=Massimo&rft.au=Schmid%2C+Maurizio&rft.au=Conforto%2C+Silvia&rft.au=D%E2%80%99Alessio%2C+Tommaso&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC3543256&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-38"><span class="mw-cite-backlink"><b><a href="#cite_ref-38">^</a></b></span> <span class="reference-text">The Eye: A Survey of Human Vision; Wikimedia Foundation</span> </li> <li id="cite_note-39"><span class="mw-cite-backlink"><b><a href="#cite_ref-39">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSigutSidha2011" class="citation journal cs1">Sigut, J; Sidha, SA (February 2011). "Iris center corneal reflection method for gaze tracking using visible light". <i>IEEE Transactions on Bio-Medical Engineering</i>. <b>58</b> (2): 411–9. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2Ftbme.2010.2087330">10.1109/tbme.2010.2087330</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/20952326">20952326</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:206611506">206611506</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=IEEE+Transactions+on+Bio-Medical+Engineering&rft.atitle=Iris+center+corneal+reflection+method+for+gaze+tracking+using+visible+light.&rft.volume=58&rft.issue=2&rft.pages=411-9&rft.date=2011-02&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A206611506%23id-name%3DS2CID&rft_id=info%3Apmid%2F20952326&rft_id=info%3Adoi%2F10.1109%2Ftbme.2010.2087330&rft.aulast=Sigut&rft.aufirst=J&rft.au=Sidha%2C+SA&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-40"><span class="mw-cite-backlink"><b><a href="#cite_ref-40">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHuaKrishnaswamyRolland2006" class="citation journal cs1">Hua, H; Krishnaswamy, P; Rolland, JP (15 May 2006). <a rel="nofollow" class="external text" href="https://stars.library.ucf.edu/facultybib2000/6233">"Video-based eyetracking methods and algorithms in head-mounted displays"</a>. <i>Optics Express</i>. <b>14</b> (10): 4328–50. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2006OExpr..14.4328H">2006OExpr..14.4328H</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1364%2Foe.14.004328">10.1364/oe.14.004328</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/19516585">19516585</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Optics+Express&rft.atitle=Video-based+eyetracking+methods+and+algorithms+in+head-mounted+displays.&rft.volume=14&rft.issue=10&rft.pages=4328-50&rft.date=2006-05-15&rft_id=info%3Apmid%2F19516585&rft_id=info%3Adoi%2F10.1364%2Foe.14.004328&rft_id=info%3Abibcode%2F2006OExpr..14.4328H&rft.aulast=Hua&rft.aufirst=H&rft.au=Krishnaswamy%2C+P&rft.au=Rolland%2C+JP&rft_id=https%3A%2F%2Fstars.library.ucf.edu%2Ffacultybib2000%2F6233&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-41"><span class="mw-cite-backlink"><b><a href="#cite_ref-41">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPurves2001" class="citation book cs1">Purves, D; et al. (2001). "What Eye Movements Accomplish". <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/books/NBK11156/"><i>Neuroscience</i></a> (2nd ed.). 
Sunderland, MA: Sinauer Assocs.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=What+Eye+Movements+Accomplish&rft.btitle=Neuroscience&rft.place=Sunderland%2C+MA&rft.edition=2nd&rft.pub=Sinauer+Assocs&rft.date=2001&rft.aulast=Purves&rft.aufirst=D&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fbooks%2FNBK11156%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-42"><span class="mw-cite-backlink"><b><a href="#cite_ref-42">^</a></b></span> <span class="reference-text">Majaranta, P., Aoki, H., Donegan, M., Hansen, D.W., Hansen, J.P., Hyrskykari, A., Räihä, K.J., <i>Gaze Interaction and Applications of Eye Tracking: Advances in Assistive Technologies</i>, IGI Global, 2011</span> </li> <li id="cite_note-43"><span class="mw-cite-backlink"><b><a href="#cite_ref-43">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNielsenPernice2010" class="citation book cs1">Nielsen, J.; Pernice, K. (2010). <a rel="nofollow" class="external text" href="https://books.google.com/books?id=EeQhHqjgQosC"><i>Eyetracking Web Usability</i></a>. New Rideres Publishing. p. 11. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-0-321-71407-7" title="Special:BookSources/978-0-321-71407-7"><bdi>978-0-321-71407-7</bdi></a><span class="reference-accessdate">. Retrieved <span class="nowrap">28 October</span> 2013</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=book&rft.btitle=Eyetracking+Web+Usability&rft.pages=11&rft.pub=New+Rideres+Publishing&rft.date=2010&rft.isbn=978-0-321-71407-7&rft.aulast=Nielsen&rft.aufirst=J.&rft.au=Pernice%2C+K.&rft_id=https%3A%2F%2Fbooks.google.com%2Fbooks%3Fid%3DEeQhHqjgQosC&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-44"><span class="mw-cite-backlink"><b><a href="#cite_ref-44">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLe_MeurBaccino2013" class="citation journal cs1">Le Meur, O; Baccino, T (2013). "Methods for comparing scanpaths and saliency maps: strengths and weaknesses". <i>Behavior Research Methods</i>. <b>45</b> (1).</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Behavior+Research+Methods&rft.atitle=Methods+for+comparing+scanpaths+and+saliency+maps%3A+strengths+and+weaknesses&rft.volume=45&rft.issue=1&rft.date=2013&rft.aulast=Le+Meur&rft.aufirst=O&rft.au=Baccino%2C+T&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-45"><span class="mw-cite-backlink"><b><a href="#cite_ref-45">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFAharonsonCoopooGovenderPostema2020" class="citation journal cs1">Aharonson V, Coopoo V, Govender K, Postema M (2020). <a rel="nofollow" class="external text" href="https://doi.org/10.23919%2FSAIEE.2020.9142605">"Automatic pupil detection and gaze estimation using the vestibulo-ocular reflex in a low-cost eye-tracking setup"</a>. <i>SAIEE Africa Research Journal</i>. <b>111</b> (3): 120–124. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.23919%2FSAIEE.2020.9142605">10.23919/SAIEE.2020.9142605</a></span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=SAIEE+Africa+Research+Journal&rft.atitle=Automatic+pupil+detection+and+gaze+estimation+using+the+vestibulo-ocular+reflex+in+a+low-cost+eye-tracking+setup&rft.volume=111&rft.issue=3&rft.pages=120-124&rft.date=2020&rft_id=info%3Adoi%2F10.23919%2FSAIEE.2020.9142605&rft.aulast=Aharonson&rft.aufirst=V&rft.au=Coopoo%2C+V&rft.au=Govender%2C+K&rft.au=Postema%2C+M&rft_id=https%3A%2F%2Fdoi.org%2F10.23919%252FSAIEE.2020.9142605&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-46"><span class="mw-cite-backlink"><b><a href="#cite_ref-46">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFEinhäuserSchumannBardinsBartl2007" class="citation journal cs1">Einhäuser, W; Schumann, F; Bardins, S; Bartl, K; Böning, G; Schneider, E; König, P (2007). "Human eye-head co-ordination in natural exploration". <i>Network: Computation in Neural Systems</i>. <b>18</b> (3): 267–297. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F09548980701671094">10.1080/09548980701671094</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/17926195">17926195</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:1812177">1812177</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Network%3A+Computation+in+Neural+Systems&rft.atitle=Human+eye-head+co-ordination+in+natural+exploration&rft.volume=18&rft.issue=3&rft.pages=267-297&rft.date=2007&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A1812177%23id-name%3DS2CID&rft_id=info%3Apmid%2F17926195&rft_id=info%3Adoi%2F10.1080%2F09548980701671094&rft.aulast=Einh%C3%A4user&rft.aufirst=W&rft.au=Schumann%2C+F&rft.au=Bardins%2C+S&rft.au=Bartl%2C+K&rft.au=B%C3%B6ning%2C+G&rft.au=Schneider%2C+E&rft.au=K%C3%B6nig%2C+P&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-47"><span class="mw-cite-backlink"><b><a href="#cite_ref-47">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFAndersenBracewellBarashGnadt1990" class="citation journal cs1">Andersen, R. A.; Bracewell, R. M.; Barash, S.; Gnadt, J. W.; Fogassi, L. (1990). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6570201">"Eye position effects on visual, memory, and saccade-related activity in areas LIP and 7a of macaque"</a>. <i>Journal of Neuroscience</i>. <b>10</b> (4): 1176–1196. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1523%2FJNEUROSCI.10-04-01176.1990">10.1523/JNEUROSCI.10-04-01176.1990</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6570201">6570201</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/2329374">2329374</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:18817768">18817768</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Neuroscience&rft.atitle=Eye+position+effects+on+visual%2C+memory%2C+and+saccade-related+activity+in+areas+LIP+and+7a+of+macaque&rft.volume=10&rft.issue=4&rft.pages=1176-1196&rft.date=1990&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6570201%23id-name%3DPMC&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A18817768%23id-name%3DS2CID&rft_id=info%3Apmid%2F2329374&rft_id=info%3Adoi%2F10.1523%2FJNEUROSCI.10-04-01176.1990&rft.aulast=Andersen&rft.aufirst=R.+A.&rft.au=Bracewell%2C+R.+M.&rft.au=Barash%2C+S.&rft.au=Gnadt%2C+J.+W.&rft.au=Fogassi%2C+L.&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6570201&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-48"><span class="mw-cite-backlink"><b><a href="#cite_ref-48">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFerhatVilariño2016" class="citation journal cs1">Ferhat, Onur; Vilariño, Fernando (2016). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4808529">"Low Cost Eye Tracking: The Current Panorama"</a>. <i>Computational Intelligence and Neuroscience</i>. <b>2016</b>: 1–14. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1155%2F2016%2F8680541">10.1155/2016/8680541</a></span>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4808529">4808529</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/27034653">27034653</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Computational+Intelligence+and+Neuroscience&rft.atitle=Low+Cost+Eye+Tracking%3A+The+Current+Panorama&rft.volume=2016&rft.pages=1-14&rft.date=2016&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC4808529%23id-name%3DPMC&rft_id=info%3Apmid%2F27034653&rft_id=info%3Adoi%2F10.1155%2F2016%2F8680541&rft.aulast=Ferhat&rft.aufirst=Onur&rft.au=Vilari%C3%B1o%2C+Fernando&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC4808529&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-49"><span class="mw-cite-backlink"><b><a href="#cite_ref-49">^</a></b></span> <span class="reference-text"><a href="#CITEREFHunziker2006">Hunziker 2006</a>. Based on data from: Cohen, A. S. (1983). Informationsaufnahme beim Befahren von Kurven, Psychologie für die Praxis 2/83, Bulletin der Schweizerischen Stiftung für Angewandte Psychologie.</span> </li> <li id="cite_note-50"><span class="mw-cite-backlink"><b><a href="#cite_ref-50">^</a></b></span> <span class="reference-text">Cohen, A. S. (1983). Informationsaufnahme beim Befahren von Kurven, Psychologie für die Praxis 2/83, Bulletin der Schweizerischen Stiftung für Angewandte Psychologie</span> </li> <li id="cite_note-51"><span class="mw-cite-backlink"><b><a href="#cite_ref-51">^</a></b></span> <span class="reference-text">Pictures from <a href="#CITEREFHunziker2006">Hunziker 2006</a></span> </li> <li id="cite_note-52"><span class="mw-cite-backlink"><b><a href="#cite_ref-52">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGrünerAnsorge2017" class="citation journal cs1">Grüner, M; Ansorge, U (2017). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7141062">"Mobile eye tracking during real-world night driving: A selective review of findings and recommendations for future research"</a>. <i>Journal of Eye Movement Research</i>. <b>10</b> (2). <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.16910%2FJEMR.10.2.1">10.16910/JEMR.10.2.1</a></span>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7141062">7141062</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/33828651">33828651</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Eye+Movement+Research&rft.atitle=Mobile+eye+tracking+during+real-world+night+driving%3A+A+selective+review+of+findings+and+recommendations+for+future+research&rft.volume=10&rft.issue=2&rft.date=2017&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7141062%23id-name%3DPMC&rft_id=info%3Apmid%2F33828651&rft_id=info%3Adoi%2F10.16910%2FJEMR.10.2.1&rft.aulast=Gr%C3%BCner&rft.aufirst=M&rft.au=Ansorge%2C+U&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7141062&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-53"><span class="mw-cite-backlink"><b><a href="#cite_ref-53">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFItohFukuda2002" class="citation journal cs1">Itoh, Nana; Fukuda, Tadahiko (2002). "Comparative Study of Eye Movements in Extent of Central and Peripheral Vision and Use by Young and Elderly Walkers". <i>Perceptual and Motor Skills</i>. <b>94</b> (3_suppl): 1283–1291. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.2466%2Fpms.2002.94.3c.1283">10.2466/pms.2002.94.3c.1283</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/12186250">12186250</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:1058879">1058879</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Perceptual+and+Motor+Skills&rft.atitle=Comparative+Study+of+Eye+Movements+in+Extent+of+Central+and+Peripheral+Vision+and+Use+by+Young+and+Elderly+Walkers&rft.volume=94&rft.issue=3_suppl&rft.pages=1283-1291&rft.date=2002&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A1058879%23id-name%3DS2CID&rft_id=info%3Apmid%2F12186250&rft_id=info%3Adoi%2F10.2466%2Fpms.2002.94.3c.1283&rft.aulast=Itoh&rft.aufirst=Nana&rft.au=Fukuda%2C+Tadahiko&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-54"><span class="mw-cite-backlink"><b><a href="#cite_ref-54">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDuchowski2002" class="citation journal cs1">Duchowski, A. T. (2002). <a rel="nofollow" class="external text" href="https://doi.org/10.3758%2FBF03195475">"A breadth-first survey of eye-tracking applications"</a>. <i>Behavior Research Methods, Instruments, & Computers</i>. <b>34</b> (4): 455–470. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.3758%2FBF03195475">10.3758/BF03195475</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/12564550">12564550</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:4361938">4361938</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Behavior+Research+Methods%2C+Instruments%2C+%26+Computers&rft.atitle=A+breadth-first+survey+of+eye-tracking+applications&rft.volume=34&rft.issue=4&rft.pages=455-470&rft.date=2002&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A4361938%23id-name%3DS2CID&rft_id=info%3Apmid%2F12564550&rft_id=info%3Adoi%2F10.3758%2FBF03195475&rft.aulast=Duchowski&rft.aufirst=A.+T.&rft_id=https%3A%2F%2Fdoi.org%2F10.3758%252FBF03195475&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-55"><span class="mw-cite-backlink"><b><a href="#cite_ref-55">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRogers" class="citation web cs1">Rogers, Sol. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/solrogers/2019/02/05/seven-reasons-why-eye-tracking-will-fundamentally-change-vr/">"Seven Reasons Why Eye-tracking Will Fundamentally Change VR"</a>. <i>Forbes</i><span class="reference-accessdate">. Retrieved <span class="nowrap">16 December</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=Forbes&rft.atitle=Seven+Reasons+Why+Eye-tracking+Will+Fundamentally+Change+VR&rft.aulast=Rogers&rft.aufirst=Sol&rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fsolrogers%2F2019%2F02%2F05%2Fseven-reasons-why-eye-tracking-will-fundamentally-change-vr%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-56"><span class="mw-cite-backlink"><b><a href="#cite_ref-56">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLohseWu2001" class="citation journal cs1">Lohse, Gerald; Wu, D. J. (1 February 2001). "Eye Movement Patterns on Chinese Yellow Pages Advertising". <i>Electronic Markets</i>. <b>11</b> (2): 87–96. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F101967801300197007">10.1080/101967801300197007</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:1064385">1064385</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Electronic+Markets&rft.atitle=Eye+Movement+Patterns+on+Chinese+Yellow+Pages+Advertising&rft.volume=11&rft.issue=2&rft.pages=87-96&rft.date=2001-02-01&rft_id=info%3Adoi%2F10.1080%2F101967801300197007&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A1064385%23id-name%3DS2CID&rft.aulast=Lohse&rft.aufirst=Gerald&rft.au=Wu%2C+D.+J.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-57"><span class="mw-cite-backlink"><b><a href="#cite_ref-57">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="http://www.searchenginejournal.com/eye-tracking-study-importance-using-google-authorship-search-results/71207/">"Eye Tracking Study: The Importance of Using Google Authorship in Search Results"</a></span> </li> <li id="cite_note-58"><span class="mw-cite-backlink"><b><a href="#cite_ref-58">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1 cs1-prop-foreign-lang-source"><a rel="nofollow" class="external text" href="https://elementapp.ai/blog/3-seconds-to-screen-candidate-profile-biometric-research-results/">"3 seconds is enough to screen candidate's profile. Eye tracking research results"</a>. <i>Element's Blog - nowości ze świata rekrutacji, HR Tech i Element</i> (in Polish). 21 February 2019<span class="reference-accessdate">. Retrieved <span class="nowrap">3 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=Element%27s+Blog+-+nowo%C5%9Bci+ze+%C5%9Bwiata+rekrutacji%2C+HR+Tech+i+Element&rft.atitle=3+seconds+is+enough+to+screen+candidate%27s+profile.+Eye+tracking+research+results.&rft.date=2019-02-21&rft_id=https%3A%2F%2Felementapp.ai%2Fblog%2F3-seconds-to-screen-candidate-profile-biometric-research-results%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-59"><span class="mw-cite-backlink"><b><a href="#cite_ref-59">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCornoFarinettiSignorile2002" class="citation book cs1">Corno, F.; Farinetti, L.; Signorile, I. (August 2002). <a rel="nofollow" class="external text" href="https://ieeexplore.ieee.org/document/1035632">"A cost-effective solution for eye-gaze assistive technology"</a>. <i>Proceedings. IEEE International Conference on Multimedia and Expo</i>. Vol. 2. pp. 433–436. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FICME.2002.1035632">10.1109/ICME.2002.1035632</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/0-7803-7304-9" title="Special:BookSources/0-7803-7304-9"><bdi>0-7803-7304-9</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:42361339">42361339</a><span class="reference-accessdate">. Retrieved <span class="nowrap">5 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=A+cost-effective+solution+for+eye-gaze+assistive+technology&rft.btitle=Proceedings.+IEEE+International+Conference+on+Multimedia+and+Expo&rft.pages=433-436&rft.date=2002-08&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A42361339%23id-name%3DS2CID&rft_id=info%3Adoi%2F10.1109%2FICME.2002.1035632&rft.isbn=0-7803-7304-9&rft.aulast=Corno&rft.aufirst=F.&rft.au=Farinetti%2C+L.&rft.au=Signorile%2C+I.&rft_id=https%3A%2F%2Fieeexplore.ieee.org%2Fdocument%2F1035632&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-60"><span class="mw-cite-backlink"><b><a href="#cite_ref-60">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPinheiroNavesPinoLesson2011" class="citation journal cs1">Pinheiro, C.; Naves, E. L.; Pino, P.; Lesson, E.; Andrade, A.O.; Bourhis, G. (July 2011). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3103465">"Alternative communication systems for people with severe motor disabilities: a survey"</a>. <i>BioMedical Engineering OnLine</i>. <b>10</b> (1): 31. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1186%2F1475-925X-10-31">10.1186/1475-925X-10-31</a></span>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3103465">3103465</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/21507236">21507236</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=BioMedical+Engineering+OnLine&rft.atitle=Alternative+communication+systems+for+people+with+severe+motor+disabilities%3A+a+survey&rft.volume=10&rft.issue=1&rft.pages=31&rft.date=2011-07&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC3103465%23id-name%3DPMC&rft_id=info%3Apmid%2F21507236&rft_id=info%3Adoi%2F10.1186%2F1475-925X-10-31&rft.aulast=Pinheiro&rft.aufirst=C.&rft.au=Naves%2C+E.+L.&rft.au=Pino%2C+P.&rft.au=Lesson%2C+E.&rft.au=Andrade%2C+A.O.&rft.au=Bourhis%2C+G.&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC3103465&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-61"><span class="mw-cite-backlink"><b><a href="#cite_ref-61">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSaundersSmagnerSaunders2003" class="citation journal cs1">Saunders, M.D.; Smagner, J.P.; Saunders, R.R. (August 2003). "Improving methodological and technological analyses of adaptive switch use of individuals with profound multiple impairments". 
<i>Behavioral Interventions</i>. <b>18</b> (4): 227–243. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1002%2Fbin.141">10.1002/bin.141</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Behavioral+Interventions&rft.atitle=Improving+methodological+and+technological+analyses+of+adaptive+switch+use+of+individuals+with+profound+multiple+impairments&rft.volume=18&rft.issue=4&rft.pages=227-243&rft.date=2003-08&rft_id=info%3Adoi%2F10.1002%2Fbin.141&rft.aulast=Saunders&rft.aufirst=M.D.&rft.au=Smagner%2C+J.P.&rft.au=Saunders%2C+R.R.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-62"><span class="mw-cite-backlink"><b><a href="#cite_ref-62">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.cdc.gov/ncbddd/cp/facts.html">"Cerebral Palsy (CP)"</a><span class="reference-accessdate">. Retrieved <span class="nowrap">4 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=unknown&rft.btitle=Cerebral+Palsy+%28CP%29&rft_id=https%3A%2F%2Fwww.cdc.gov%2Fncbddd%2Fcp%2Ffacts.html&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-63"><span class="mw-cite-backlink"><b><a href="#cite_ref-63">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWilkinsonMitchell2014" class="citation journal cs1">Wilkinson, K.M.; Mitchell, T. (March 2014). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4327869">"Eye tracking research to answer questions about augmentative and alternative communication assessment and intervention"</a>. <i>Augmentative and Alternative Communication</i>. <b>30</b> (2): 106–119. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.3109%2F07434618.2014.904435">10.3109/07434618.2014.904435</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4327869">4327869</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/24758526">24758526</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Augmentative+and+Alternative+Communication&rft.atitle=Eye+tracking+research+to+answer+questions+about+augmentative+and+alternative+communication+assessment+and+intervention&rft.volume=30&rft.issue=2&rft.pages=106-119&rft.date=2014-03&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC4327869%23id-name%3DPMC&rft_id=info%3Apmid%2F24758526&rft_id=info%3Adoi%2F10.3109%2F07434618.2014.904435&rft.aulast=Wilkinson&rft.aufirst=K.M.&rft.au=Mitchell%2C+T.&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC4327869&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-64"><span class="mw-cite-backlink"><b><a href="#cite_ref-64">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGalanteMenezes2012" class="citation journal cs1">Galante, A.; Menezes, P. (June 2012). <a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.protcy.2012.09.099">"A gaze-based interaction system for people with cerebral palsy"</a>. <i>Procedia Technology</i>. <b>5</b>: 895–902. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.protcy.2012.09.099">10.1016/j.protcy.2012.09.099</a></span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Procedia+Technology&rft.atitle=A+gaze-based+interaction+system+for+people+with+cerebral+palsy&rft.volume=5&rft.pages=895-902&rft.date=2012-06&rft_id=info%3Adoi%2F10.1016%2Fj.protcy.2012.09.099&rft.aulast=Galante&rft.aufirst=A.&rft.au=Menezes%2C+P.&rft_id=https%3A%2F%2Fdoi.org%2F10.1016%252Fj.protcy.2012.09.099&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-65"><span class="mw-cite-backlink"><b><a href="#cite_ref-65">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBLISCHAKLOMBARDINODYSON2003" class="citation journal cs1">BLISCHAK, D.; LOMBARDINO, L.; DYSON, A. (June 2003). "Use of speech-generating devices: In support of natural speech". <i>Augmentative and Alternative Communication</i>. <b>19</b> (1): 29–35. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F0743461032000056478">10.1080/0743461032000056478</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/28443791">28443791</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:205581902">205581902</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Augmentative+and+Alternative+Communication&rft.atitle=Use+of+speech-generating+devices%3A+In+support+of+natural+speech&rft.volume=19&rft.issue=1&rft.pages=29-35&rft.date=2003-06&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A205581902%23id-name%3DS2CID&rft_id=info%3Apmid%2F28443791&rft_id=info%3Adoi%2F10.1080%2F0743461032000056478&rft.aulast=BLISCHAK&rft.aufirst=D.&rft.au=LOMBARDINO%2C+L.&rft.au=DYSON%2C+A.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-66"><span class="mw-cite-backlink"><b><a href="#cite_ref-66">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSharmaMurthySingh_SalujaMollyn2020" class="citation journal cs1">Sharma, V.K.; Murthy, L. R. D.; Singh Saluja, K.; Mollyn, V.; Sharma, G.; Biswas, Pradipta (August 2020). <a rel="nofollow" class="external text" href="https://content.iospress.com/articles/technology-and-disability/tad200264">"Webcam controlled robotic arm for persons with SSMI"</a>. <i>Technology and Disability</i>. <b>32</b> (3): 179–197. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2005.11994">2005.11994</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.3233%2FTAD-200264">10.3233/TAD-200264</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:218870304">218870304</a><span class="reference-accessdate">. Retrieved <span class="nowrap">5 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Technology+and+Disability&rft.atitle=Webcam+controlled+robotic+arm+for+persons+with+SSMI&rft.volume=32&rft.issue=3&rft.pages=179-197&rft.date=2020-08&rft_id=info%3Aarxiv%2F2005.11994&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A218870304%23id-name%3DS2CID&rft_id=info%3Adoi%2F10.3233%2FTAD-200264&rft.aulast=Sharma&rft.aufirst=V.K.&rft.au=Murthy%2C+L.+R.+D.&rft.au=Singh+Saluja%2C+K.&rft.au=Mollyn%2C+V.&rft.au=Sharma%2C+G.&rft.au=Biswas%2C+Pradipta&rft_id=https%3A%2F%2Fcontent.iospress.com%2Farticles%2Ftechnology-and-disability%2Ftad200264&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-67"><span class="mw-cite-backlink"><b><a href="#cite_ref-67">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFEidGiakoumidisEl_Saddik2016" class="citation journal cs1">Eid, M.A.; Giakoumidis, N.; El Saddik, A. (July 2016). <a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FACCESS.2016.2520093">"A novel eye-gaze-controlled wheelchair system for navigating unknown environments: case study with a person with ALS"</a>. <i>IEEE Access</i>. 
<b>4</b>: 558–573. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2016IEEEA...4..558E">2016IEEEA...4..558E</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FACCESS.2016.2520093">10.1109/ACCESS.2016.2520093</a></span>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:28210837">28210837</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=IEEE+Access&rft.atitle=A+novel+eye-gaze-controlled+wheelchair+system+for+navigating+unknown+environments%3A+case+study+with+a+person+with+ALS&rft.volume=4&rft.pages=558-573&rft.date=2016-07&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A28210837%23id-name%3DS2CID&rft_id=info%3Adoi%2F10.1109%2FACCESS.2016.2520093&rft_id=info%3Abibcode%2F2016IEEEA...4..558E&rft.aulast=Eid&rft.aufirst=M.A.&rft.au=Giakoumidis%2C+N.&rft.au=El+Saddik%2C+A.&rft_id=https%3A%2F%2Fdoi.org%2F10.1109%252FACCESS.2016.2520093&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-68"><span class="mw-cite-backlink"><b><a href="#cite_ref-68">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJeevithashreeSalujaBiswas2019" class="citation journal cs1">Jeevithashree, D. V.; Saluja, K.S.; Biswas, Pradipta (December 2019). <a rel="nofollow" class="external text" href="https://content.iospress.com/articles/technology-and-disability/tad180206">"A case study of developing gaze-controlled interface for users with severe speech and motor impairment"</a>. <i>Technology and Disability</i>. <b>31</b> (1–2): 63–76. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.3233%2FTAD-180206">10.3233/TAD-180206</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:199083245">199083245</a><span class="reference-accessdate">. 
Retrieved <span class="nowrap">5 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Technology+and+Disability&rft.atitle=A+case+study+of+developing+gaze-controlled+interface+for+users+with+severe+speech+and+motor+impairment&rft.volume=31&rft.issue=1%E2%80%932&rft.pages=63-76&rft.date=2019-12&rft_id=info%3Adoi%2F10.3233%2FTAD-180206&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A199083245%23id-name%3DS2CID&rft.aulast=Jeevithashree&rft.aufirst=D.+V.&rft.au=Saluja%2C+K.S.&rft.au=Biswas%2C+Pradipta&rft_id=https%3A%2F%2Fcontent.iospress.com%2Farticles%2Ftechnology-and-disability%2Ftad180206&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-69"><span class="mw-cite-backlink"><b><a href="#cite_ref-69">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJonesObregónKellyBranigan2008" class="citation journal cs1">Jones, M.W.; Obregón, M.; Kelly, M.L.; Branigan, H.P. (May 2008). <a rel="nofollow" class="external text" href="https://www.sciencedirect.com/science/article/abs/pii/S0010027708002230">"Elucidating the component processes involved in dyslexic and non-dyslexic reading fluency: An eye-tracking study"</a>. <i>Cognition</i>. <b>109</b> (3): 389–407. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.cognition.2008.10.005">10.1016/j.cognition.2008.10.005</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/19019349">19019349</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:29389144">29389144</a><span class="reference-accessdate">. Retrieved <span class="nowrap">5 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Cognition&rft.atitle=Elucidating+the+component+processes+involved+in+dyslexic+and+non-dyslexic+reading+fluency%3A+An+eye-tracking+study&rft.volume=109&rft.issue=3&rft.pages=389-407&rft.date=2008-05&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A29389144%23id-name%3DS2CID&rft_id=info%3Apmid%2F19019349&rft_id=info%3Adoi%2F10.1016%2Fj.cognition.2008.10.005&rft.aulast=Jones&rft.aufirst=M.W.&rft.au=Obreg%C3%B3n%2C+M.&rft.au=Kelly%2C+M.L.&rft.au=Branigan%2C+H.P.&rft_id=https%3A%2F%2Fwww.sciencedirect.com%2Fscience%2Farticle%2Fabs%2Fpii%2FS0010027708002230&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-70"><span class="mw-cite-backlink"><b><a href="#cite_ref-70">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCalhounJanson1991" class="citation journal cs1">Calhoun, G. L; Janson (1991). "Eye line-of-sight control compared to manual selection of discrete switches". 
71. Fitts, P.M.; Jones, R.E.; Milton, J.L. (1950). "Eye movements of aircraft pilots during instrument-landing approaches". Aeronautical Engineering Review. Retrieved 20 July 2020.
72. Peysakhovich, V.; Lefrançois, O.; Dehais, F.; Causse, M. (2018). "The neuroergonomics of aircraft cockpits: the four stages of eye-tracking integration to enhance flight safety". Safety. 4 (1): 8. doi:10.3390/safety4010008.
73. de Reus, A.J.C.; Zon, R.; Ouwerkerk, R. (November 2012). "Exploring the use of an eye tracker in a helmet mounted display". National Aerospace Laboratory Technical Report NLR-TP-2012-001.
74. Jeevithashree, D.V.; Murthy, L.R.D.; Saluja, K.S.; Biswas, P. (2018). "Operating different displays in military fast jets using eye gaze tracker". Journal of Aviation Technology and Engineering. 8 (4). Retrieved 24 July 2020.
75. Babu, M.; Jeevithashree, D.V.; Prabhakar, G.; Saluja, K.P.; Pashilkar, A.; Biswas, P. (2019). "Estimating pilots' cognitive load from ocular parameters through simulation and in-flight studies". Journal of Eye Movement Research. 12 (3). doi:10.16910/jemr.12.3.3. PMC 7880144. PMID 33828735. Retrieved 3 August 2020.
76. Peißl, S.; Wickens, C.D.; Baruah, R. (2018). "Eye-tracking measures in aviation: A selective literature review". The International Journal of Aerospace Psychology. 28 (3–4): 98–112. doi:10.1080/24721840.2018.1514978. S2CID 70016458.
26 April 2013.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=unknown&rft.btitle=Visual-Manual+NHTSA+Driver+Distraction+Guidelines+for+In-Vehicle+Electronic+Devices&rft.date=2013-04-26&rft_id=https%3A%2F%2Fwww.federalregister.gov%2Fdocuments%2F2013%2F04%2F26%2F2013-09883%2Fvisual-manual-nhtsa-driver-distraction-guidelines-for-in-vehicle-electronic-devices&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-78"><span class="mw-cite-backlink"><b><a href="#cite_ref-78">^</a></b></span> <span class="reference-text"><style data-mw-deduplicate="TemplateStyles:r1041539562">.mw-parser-output .citation{word-wrap:break-word}.mw-parser-output .citation:target{background-color:rgba(0,127,255,0.133)}</style><span class="citation patent" id="CITEREFMondragon,_Christopher_K.Bleacher,_Brett2015"><a rel="nofollow" class="external text" href="https://patents.google.com/patent/US8928585B2/en">US patent 8928585B2</a>, Mondragon, Christopher K. & Bleacher, Brett, "Eye tracking control of vehicle entertainment systems", issued 2015-01-06,  assigned to Thales Avionics Inc</span><span class="Z3988" title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Apatent&rft.number=8928585B2&rft.cc=US&rft.title=Eye+tracking+control+of+vehicle+entertainment+systems&rft.inventor=Mondragon%2C+Christopher+K.&rft.assignee=Thales+Avionics+Inc&rft.date=2015-01-06&rft.appldate=2012-09-06&rft.prioritydate=2011-09-09"><span style="display: none;"> </span></span></span> </li> <li id="cite_note-79"><span class="mw-cite-backlink"><b><a href="#cite_ref-79">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPoitschkeLaquaiStambolievRigoll2011" class="citation book cs1">Poitschke, T.; Laquai, F.; Stamboliev, S.; Rigoll, G. (2011). <a rel="nofollow" class="external text" href="https://mediatum.ub.tum.de/doc/1107278/document.pdf">"Gaze-based interaction on multiple displays in an automotive environment"</a> <span class="cs1-format">(PDF)</span>. <i>2011 IEEE International Conference on Systems, Man, and Cybernetics</i>. pp. 543–548. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FICSMC.2011.6083740">10.1109/ICSMC.2011.6083740</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-1-4577-0653-0" title="Special:BookSources/978-1-4577-0653-0"><bdi>978-1-4577-0653-0</bdi></a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1062-922X">1062-922X</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:9362329">9362329</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=Gaze-based+interaction+on+multiple+displays+in+an+automotive+environment&rft.btitle=2011+IEEE+International+Conference+on+Systems%2C+Man%2C+and+Cybernetics&rft.pages=543-548&rft.date=2011&rft_id=info%3Adoi%2F10.1109%2FICSMC.2011.6083740&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A9362329%23id-name%3DS2CID&rft.issn=1062-922X&rft.isbn=978-1-4577-0653-0&rft.aulast=Poitschke&rft.aufirst=T.&rft.au=Laquai%2C+F.&rft.au=Stamboliev%2C+S.&rft.au=Rigoll%2C+G.&rft_id=http%3A%2F%2Fmediatum.ub.tum.de%2Fdoc%2F1107278%2Fdocument.pdf&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-80"><span class="mw-cite-backlink"><b><a href="#cite_ref-80">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPrabhakarRamakrishnanMurthySharma2020" class="citation journal cs1">Prabhakar, G.; Ramakrishnan, A.; Murthy, L.; Sharma, V.K.; Madan, M.; Deshmukh, S.; Biswas, P. (2020). "Interactive Gaze & Finger controlled HUD for Cars". <i>Journal of Multimodal User Interface</i>. <b>14</b>: 101–121. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs12193-019-00316-9">10.1007/s12193-019-00316-9</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:208261516">208261516</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Multimodal+User+Interface&rft.atitle=Interactive+Gaze+%26+Finger+controlled+HUD+for+Cars&rft.volume=14&rft.pages=101-121&rft.date=2020&rft_id=info%3Adoi%2F10.1007%2Fs12193-019-00316-9&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A208261516%23id-name%3DS2CID&rft.aulast=Prabhakar&rft.aufirst=G.&rft.au=Ramakrishnan%2C+A.&rft.au=Murthy%2C+L.&rft.au=Sharma%2C+V.K.&rft.au=Madan%2C+M.&rft.au=Deshmukh%2C+S.&rft.au=Biswas%2C+P.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-81"><span class="mw-cite-backlink"><b><a href="#cite_ref-81">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMarshall2002" class="citation book cs1">Marshall, S. (2002). "The Index of Cognitive Activity: Measuring cognitive workload". <i>Proceedings of the IEEE 7th Conference on Human Factors and Power Plants</i>. pp. 7-5–7-9. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FHFPP.2002.1042860">10.1109/HFPP.2002.1042860</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/0-7803-7450-9" title="Special:BookSources/0-7803-7450-9"><bdi>0-7803-7450-9</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:44561112">44561112</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=The+Index+of+Cognitive+Activity%3A+Measuring+cognitive+workload&rft.btitle=Proceedings+of+the+IEEE+7th+Conference+on+Human+Factors+and+Power+Plants&rft.pages=7-5-7-9&rft.date=2002&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A44561112%23id-name%3DS2CID&rft_id=info%3Adoi%2F10.1109%2FHFPP.2002.1042860&rft.isbn=0-7803-7450-9&rft.aulast=Marshall&rft.aufirst=S.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-82"><span class="mw-cite-backlink"><b><a href="#cite_ref-82">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDuchowskiBieleNiedzielskaKrejtz2018" class="citation journal cs1">Duchowski, A. T.; Biele, C.; Niedzielska, A.; Krejtz, K.; Krejtz, I.; Kiefer, P.; Raubal, M.; Giannopoulos, I. (2018). <a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3173574.3173856">"The Index of Pupillary Activity Measuring Cognitive Load vis-à-vis Task Difficulty with Pupil Oscillation"</a>. <i>ACM SIGCHI Conference on Human Factors</i>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3173574.3173856">10.1145/3173574.3173856</a></span>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:5064488">5064488</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=ACM+SIGCHI+Conference+on+Human+Factors&rft.atitle=The+Index+of+Pupillary+Activity+Measuring+Cognitive+Load+vis-%C3%A0-vis+Task+Difficulty+with+Pupil+Oscillation&rft.date=2018&rft_id=info%3Adoi%2F10.1145%2F3173574.3173856&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A5064488%23id-name%3DS2CID&rft.aulast=Duchowski&rft.aufirst=A.+T.&rft.au=Biele%2C+C.&rft.au=Niedzielska%2C+A.&rft.au=Krejtz%2C+K.&rft.au=Krejtz%2C+I.&rft.au=Kiefer%2C+P.&rft.au=Raubal%2C+M.&rft.au=Giannopoulos%2C+I.&rft_id=https%3A%2F%2Fdoi.org%2F10.1145%252F3173574.3173856&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-83"><span class="mw-cite-backlink"><b><a href="#cite_ref-83">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPrabhakarMukhopadhyayMurthyModiksha2020" class="citation journal cs1">Prabhakar, G.; Mukhopadhyay, A.; Murthy, L.; Modiksha, M. A. D. A. N.; Biswas, P. (2020). <a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.treng.2020.100008">"Cognitive load estimation using Ocular Parameters in Automotive"</a>. <i>Transportation Engineering</i>. <b>2</b>: 100008. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.treng.2020.100008">10.1016/j.treng.2020.100008</a></span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Transportation+Engineering&rft.atitle=Cognitive+load+estimation+using+Ocular+Parameters+in+Automotive&rft.volume=2&rft.pages=100008&rft.date=2020&rft_id=info%3Adoi%2F10.1016%2Fj.treng.2020.100008&rft.aulast=Prabhakar&rft.aufirst=G.&rft.au=Mukhopadhyay%2C+A.&rft.au=Murthy%2C+L.&rft.au=Modiksha%2C+M.+A.+D.+A.+N.&rft.au=Biswas%2C+P.&rft_id=https%3A%2F%2Fdoi.org%2F10.1016%252Fj.treng.2020.100008&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-84"><span class="mw-cite-backlink"><b><a href="#cite_ref-84">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMcGuire2021" class="citation web cs1">McGuire, Keegan (8 April 2021). <a rel="nofollow" class="external text" href="https://www.looper.com/377254/what-the-critics-are-saying-about-before-your-eyes/">"What The Critics Are Saying About Before Your Eyes"</a>. <i><a href="/wiki/Looper.com" class="mw-redirect" title="Looper.com">looper.com</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210423200906/https://www.looper.com/377254/what-the-critics-are-saying-about-before-your-eyes/">Archived</a> from the original on 23 April 2021.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=looper.com&rft.atitle=What+The+Critics+Are+Saying+About+Before+Your+Eyes&rft.date=2021-04-08&rft.aulast=McGuire&rft.aufirst=Keegan&rft_id=https%3A%2F%2Fwww.looper.com%2F377254%2Fwhat-the-critics-are-saying-about-before-your-eyes%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-85"><span class="mw-cite-backlink"><b><a href="#cite_ref-85">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFvon_Au2021" class="citation web cs1 cs1-prop-foreign-lang-source">von Au, Caspar (24 April 2021). <a rel="nofollow" class="external text" href="https://www.br.de/nachrichten/kultur/computerspiel-before-your-eyes-wird-mit-den-augen-gesteuert,SVPcdxN">"Computerspiel "Before Your Eyes" wird mit den Augen gesteuert"</a> [Video game "Before Your Eyes" is controlled with your eyes]. <i><a href="/wiki/Bayerischer_Rundfunk" title="Bayerischer Rundfunk">Bayerischer Rundfunk</a></i> (in German). 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20210426135949/https://www.br.de/nachrichten/kultur/computerspiel-before-your-eyes-wird-mit-den-augen-gesteuert,SVPcdxN">Archived</a> from the original on 26 April 2021.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=Bayerischer+Rundfunk&rft.atitle=Computerspiel+%22Before+Your+Eyes%22+wird+mit+den+Augen+gesteuert&rft.date=2021-04-24&rft.aulast=von+Au&rft.aufirst=Caspar&rft_id=https%3A%2F%2Fwww.br.de%2Fnachrichten%2Fkultur%2Fcomputerspiel-before-your-eyes-wird-mit-den-augen-gesteuert%2CSVPcdxN&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-86"><span class="mw-cite-backlink"><b><a href="#cite_ref-86">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPetruselMendlingReijers2017" class="citation journal cs1">Petrusel, Razvan; Mendling, Jan; Reijers, Hajo A. (2017). <a rel="nofollow" class="external text" href="https://www.infona.pl//resource/bwmeta1.element.elsevier-54b21976-68ee-30d8-ab5f-4de5109c8a26">"How visual cognition influences process model comprehension"</a>. <i>Decision Support Systems</i>. <b>C</b> (96): 1–16. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.dss.2017.01.005">10.1016/j.dss.2017.01.005</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0167-9236">0167-9236</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Decision+Support+Systems&rft.atitle=How+visual+cognition+influences+process+model+comprehension&rft.volume=C&rft.issue=96&rft.pages=1-16&rft.date=2017&rft_id=info%3Adoi%2F10.1016%2Fj.dss.2017.01.005&rft.issn=0167-9236&rft.aulast=Petrusel&rft.aufirst=Razvan&rft.au=Mendling%2C+Jan&rft.au=Reijers%2C+Hajo+A.&rft_id=https%3A%2F%2Fwww.infona.pl%2F%2Fresource%2Fbwmeta1.element.elsevier-54b21976-68ee-30d8-ab5f-4de5109c8a26&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-87"><span class="mw-cite-backlink"><b><a href="#cite_ref-87">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSözenSayKılıç2020" class="citation journal cs1">Sözen, Nergiz; Say, Bilge; Kılıç, Özkan (27 November 2020). <a rel="nofollow" class="external text" href="https://doi.org/10.18421%2Ftem94-09">"An Experimental Study Towards Investigating the Effect of Working Memory Capacity on Complex Diagram Understandability"</a>. <i>TEM Journal</i>. Association for Information Communication Technology Education and Science: 1384–1395. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.18421%2Ftem94-09">10.18421/tem94-09</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2217-8333">2217-8333</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:229386117">229386117</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=TEM+Journal&rft.atitle=An+Experimental+Study+Towards+Investigating+the+Effect+of+Working+Memory+Capacity+on+Complex+Diagram+Understandability&rft.pages=1384-1395&rft.date=2020-11-27&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A229386117%23id-name%3DS2CID&rft.issn=2217-8333&rft_id=info%3Adoi%2F10.18421%2Ftem94-09&rft.aulast=S%C3%B6zen&rft.aufirst=Nergiz&rft.au=Say%2C+Bilge&rft.au=K%C4%B1l%C4%B1%C3%A7%2C+%C3%96zkan&rft_id=https%3A%2F%2Fdoi.org%2F10.18421%252Ftem94-09&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-88"><span class="mw-cite-backlink"><b><a href="#cite_ref-88">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKrassanakisCybulski2021" class="citation journal cs1">Krassanakis, Vassilios; Cybulski, Paweł (14 June 2021). <a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi10060411">"Eye Tracking Research in Cartography: Looking into the Future"</a>. <i>ISPRS International Journal of Geo-Information</i>. <b>10</b> (6): 411. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2021IJGI...10..411K">2021IJGI...10..411K</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi10060411">10.3390/ijgi10060411</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2220-9964">2220-9964</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=ISPRS+International+Journal+of+Geo-Information&rft.atitle=Eye+Tracking+Research+in+Cartography%3A+Looking+into+the+Future&rft.volume=10&rft.issue=6&rft.pages=411&rft.date=2021-06-14&rft.issn=2220-9964&rft_id=info%3Adoi%2F10.3390%2Fijgi10060411&rft_id=info%3Abibcode%2F2021IJGI...10..411K&rft.aulast=Krassanakis&rft.aufirst=Vassilios&rft.au=Cybulski%2C+Pawe%C5%82&rft_id=https%3A%2F%2Fdoi.org%2F10.3390%252Fijgi10060411&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-89"><span class="mw-cite-backlink"><b><a href="#cite_ref-89">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPopelkaBrychtova2013" class="citation journal cs1">Popelka, Stanislav; Brychtova, Alzbeta (2013). <a rel="nofollow" class="external text" href="http://www.tandfonline.com/doi/full/10.1179/1743277413Y.0000000058">"Eye-tracking Study on Different Perception of 2D and 3D Terrain Visualisation"</a>. <i><a href="/wiki/The_Cartographic_Journal" title="The Cartographic Journal">The Cartographic Journal</a></i>. <b>50</b> (3): 240–246. 
<a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2013CartJ..50..240P">2013CartJ..50..240P</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1179%2F1743277413Y.0000000058">10.1179/1743277413Y.0000000058</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0008-7041">0008-7041</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:128975149">128975149</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=The+Cartographic+Journal&rft.atitle=Eye-tracking+Study+on+Different+Perception+of+2D+and+3D+Terrain+Visualisation&rft.volume=50&rft.issue=3&rft.pages=240-246&rft.date=2013&rft_id=info%3Adoi%2F10.1179%2F1743277413Y.0000000058&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A128975149%23id-name%3DS2CID&rft.issn=0008-7041&rft_id=info%3Abibcode%2F2013CartJ..50..240P&rft.aulast=Popelka&rft.aufirst=Stanislav&rft.au=Brychtova%2C+Alzbeta&rft_id=http%3A%2F%2Fwww.tandfonline.com%2Fdoi%2Ffull%2F10.1179%2F1743277413Y.0000000058&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-90"><span class="mw-cite-backlink"><b><a href="#cite_ref-90">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHermanPopelkaHejlova2017" class="citation journal cs1">Herman, Lukas; Popelka, Stanislav; Hejlova, Vendula (31 May 2017). <a rel="nofollow" class="external text" href="https://bop.unibe.ch/JEMR/article/view/3533">"Eye-tracking Analysis of Interactive 3D Geovisualization"</a>. <i>Journal of Eye Movement Research</i>. <b>10</b> (3). <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.16910%2Fjemr.10.3.2">10.16910/jemr.10.3.2</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1995-8692">1995-8692</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7141050">7141050</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/33828655">33828655</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Eye+Movement+Research&rft.atitle=Eye-tracking+Analysis+of+Interactive+3D+Geovisualization&rft.volume=10&rft.issue=3&rft.date=2017-05-31&rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7141050%23id-name%3DPMC&rft.issn=1995-8692&rft_id=info%3Apmid%2F33828655&rft_id=info%3Adoi%2F10.16910%2Fjemr.10.3.2&rft.aulast=Herman&rft.aufirst=Lukas&rft.au=Popelka%2C+Stanislav&rft.au=Hejlova%2C+Vendula&rft_id=https%3A%2F%2Fbop.unibe.ch%2FJEMR%2Farticle%2Fview%2F3533&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-91"><span class="mw-cite-backlink"><b><a href="#cite_ref-91">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFOomsDe_MaeyerFack2013" class="citation journal cs1">Ooms, K.; De Maeyer, P.; Fack, V. (22 November 2013). <a rel="nofollow" class="external text" href="https://dx.doi.org/10.1080/15230406.2013.860255">"Study of the attentive behavior of novice and expert map users using eye tracking"</a>. <i>Cartography and Geographic Information Science</i>. <b>41</b> (1): 37–54. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F15230406.2013.860255">10.1080/15230406.2013.860255</a>. <a href="/wiki/Hdl_(identifier)" class="mw-redirect" title="Hdl (identifier)">hdl</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://hdl.handle.net/1854%2FLU-4252541">1854/LU-4252541</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1523-0406">1523-0406</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:11087520">11087520</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Cartography+and+Geographic+Information+Science&rft.atitle=Study+of+the+attentive+behavior+of+novice+and+expert+map+users+using+eye+tracking&rft.volume=41&rft.issue=1&rft.pages=37-54&rft.date=2013-11-22&rft_id=info%3Ahdl%2F1854%2FLU-4252541&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A11087520%23id-name%3DS2CID&rft.issn=1523-0406&rft_id=info%3Adoi%2F10.1080%2F15230406.2013.860255&rft.aulast=Ooms&rft.aufirst=K.&rft.au=De+Maeyer%2C+P.&rft.au=Fack%2C+V.&rft_id=http%3A%2F%2Fdx.doi.org%2F10.1080%2F15230406.2013.860255&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-92"><span class="mw-cite-backlink"><b><a href="#cite_ref-92">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBeitlovaPopelkaVozenilek2020" class="citation journal cs1">Beitlova, Marketa; Popelka, Stanislav; Vozenilek, Vit (19 August 2020). 
<a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi9090492">"Differences in Thematic Map Reading by Students and Their Geography Teacher"</a>. <i>ISPRS International Journal of Geo-Information</i>. <b>9</b> (9): 492. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2020IJGI....9..492B">2020IJGI....9..492B</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi9090492">10.3390/ijgi9090492</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2220-9964">2220-9964</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=ISPRS+International+Journal+of+Geo-Information&rft.atitle=Differences+in+Thematic+Map+Reading+by+Students+and+Their+Geography+Teacher&rft.volume=9&rft.issue=9&rft.pages=492&rft.date=2020-08-19&rft.issn=2220-9964&rft_id=info%3Adoi%2F10.3390%2Fijgi9090492&rft_id=info%3Abibcode%2F2020IJGI....9..492B&rft.aulast=Beitlova&rft.aufirst=Marketa&rft.au=Popelka%2C+Stanislav&rft.au=Vozenilek%2C+Vit&rft_id=https%3A%2F%2Fdoi.org%2F10.3390%252Fijgi9090492&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-93"><span class="mw-cite-backlink"><b><a href="#cite_ref-93">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBurianPopelkaBeitlova2018" class="citation journal cs1">Burian, Jaroslav; Popelka, Stanislav; Beitlova, Marketa (17 May 2018). <a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi7050192">"Evaluation of the Cartographical Quality of Urban Plans by Eye-Tracking"</a>. <i>ISPRS International Journal of Geo-Information</i>. <b>7</b> (5): 192. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2018IJGI....7..192B">2018IJGI....7..192B</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi7050192">10.3390/ijgi7050192</a></span>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2220-9964">2220-9964</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=ISPRS+International+Journal+of+Geo-Information&rft.atitle=Evaluation+of+the+Cartographical+Quality+of+Urban+Plans+by+Eye-Tracking&rft.volume=7&rft.issue=5&rft.pages=192&rft.date=2018-05-17&rft.issn=2220-9964&rft_id=info%3Adoi%2F10.3390%2Fijgi7050192&rft_id=info%3Abibcode%2F2018IJGI....7..192B&rft.aulast=Burian&rft.aufirst=Jaroslav&rft.au=Popelka%2C+Stanislav&rft.au=Beitlova%2C+Marketa&rft_id=https%3A%2F%2Fdoi.org%2F10.3390%252Fijgi7050192&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-94"><span class="mw-cite-backlink"><b><a href="#cite_ref-94">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBrychtovaColtekin2016" class="citation journal cs1">Brychtova, Alzbeta; Coltekin, Arzu (30 June 2016). <a rel="nofollow" class="external text" href="https://dx.doi.org/10.1179/1743277414y.0000000103">"An Empirical User Study for Measuring the Influence of Colour Distance and Font Size in Map Reading Using Eye Tracking"</a>. <i>The Cartographic Journal</i>. <b>53</b> (3): 202–212. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2016CartJ..53..202B">2016CartJ..53..202B</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1179%2F1743277414y.0000000103">10.1179/1743277414y.0000000103</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0008-7041">0008-7041</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:18911777">18911777</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=The+Cartographic+Journal&rft.atitle=An+Empirical+User+Study+for+Measuring+the+Influence+of+Colour+Distance+and+Font+Size+in+Map+Reading+Using+Eye+Tracking&rft.volume=53&rft.issue=3&rft.pages=202-212&rft.date=2016-06-30&rft_id=info%3Adoi%2F10.1179%2F1743277414y.0000000103&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A18911777%23id-name%3DS2CID&rft.issn=0008-7041&rft_id=info%3Abibcode%2F2016CartJ..53..202B&rft.aulast=Brychtova&rft.aufirst=Alzbeta&rft.au=Coltekin%2C+Arzu&rft_id=http%3A%2F%2Fdx.doi.org%2F10.1179%2F1743277414y.0000000103&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-95"><span class="mw-cite-backlink"><b><a href="#cite_ref-95">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCybulski2020" class="citation journal cs1">Cybulski, Paweł (9 January 2020). <a rel="nofollow" class="external text" href="https://dx.doi.org/10.1080/15230406.2019.1702102">"Spatial distance and cartographic background complexity in graduated point symbol map-reading task"</a>. 
<i>Cartography and Geographic Information Science</i>. <b>47</b> (3): 244–260. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2020CGISc..47..244C">2020CGISc..47..244C</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F15230406.2019.1702102">10.1080/15230406.2019.1702102</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1523-0406">1523-0406</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:213161788">213161788</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Cartography+and+Geographic+Information+Science&rft.atitle=Spatial+distance+and+cartographic+background+complexity+in+graduated+point+symbol+map-reading+task&rft.volume=47&rft.issue=3&rft.pages=244-260&rft.date=2020-01-09&rft_id=info%3Adoi%2F10.1080%2F15230406.2019.1702102&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A213161788%23id-name%3DS2CID&rft.issn=1523-0406&rft_id=info%3Abibcode%2F2020CGISc..47..244C&rft.aulast=Cybulski&rft.aufirst=Pawe%C5%82&rft_id=http%3A%2F%2Fdx.doi.org%2F10.1080%2F15230406.2019.1702102&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-96"><span class="mw-cite-backlink"><b><a href="#cite_ref-96">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMansonKneDykeShannon2012" class="citation journal cs1">Manson, Steven M.; Kne, Len; Dyke, Kevin R.; Shannon, Jerry; Eria, Sami (2012). <a rel="nofollow" class="external text" href="https://dx.doi.org/10.1559/1523040639148">"Using Eye-tracking and Mouse Metrics to Test Usability of Web Mapping Navigation"</a>. <i>Cartography and Geographic Information Science</i>. <b>39</b> (1): 48–60. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2012CGISc..39...48M">2012CGISc..39...48M</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1559%2F1523040639148">10.1559/1523040639148</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1523-0406">1523-0406</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:131449617">131449617</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Cartography+and+Geographic+Information+Science&rft.atitle=Using+Eye-tracking+and+Mouse+Metrics+to+Test+Usability+of+Web+Mapping+Navigation&rft.volume=39&rft.issue=1&rft.pages=48-60&rft.date=2012&rft_id=info%3Adoi%2F10.1559%2F1523040639148&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A131449617%23id-name%3DS2CID&rft.issn=1523-0406&rft_id=info%3Abibcode%2F2012CGISc..39...48M&rft.aulast=Manson&rft.aufirst=Steven+M.&rft.au=Kne%2C+Len&rft.au=Dyke%2C+Kevin+R.&rft.au=Shannon%2C+Jerry&rft.au=Eria%2C+Sami&rft_id=http%3A%2F%2Fdx.doi.org%2F10.1559%2F1523040639148&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-97"><span class="mw-cite-backlink"><b><a href="#cite_ref-97">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPopelkaVondrakovaHujnakova2019" class="citation journal cs1">Popelka, Stanislav; Vondrakova, Alena; Hujnakova, Petra (30 May 2019). <a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi8060256">"Eye-tracking Evaluation of Weather Web Maps"</a>. <i>ISPRS International Journal of Geo-Information</i>. <b>8</b> (6): 256. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2019IJGI....8..256P">2019IJGI....8..256P</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.3390%2Fijgi8060256">10.3390/ijgi8060256</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2220-9964">2220-9964</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=ISPRS+International+Journal+of+Geo-Information&rft.atitle=Eye-tracking+Evaluation+of+Weather+Web+Maps&rft.volume=8&rft.issue=6&rft.pages=256&rft.date=2019-05-30&rft.issn=2220-9964&rft_id=info%3Adoi%2F10.3390%2Fijgi8060256&rft_id=info%3Abibcode%2F2019IJGI....8..256P&rft.aulast=Popelka&rft.aufirst=Stanislav&rft.au=Vondrakova%2C+Alena&rft.au=Hujnakova%2C+Petra&rft_id=https%3A%2F%2Fdoi.org%2F10.3390%252Fijgi8060256&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-:2-98"><span class="mw-cite-backlink">^ <a href="#cite_ref-:2_98-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:2_98-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFVojtechovskaPopelka2023" class="citation journal cs1">Vojtechovska, Michaela; Popelka, Stanislav (12 August 2023). <a rel="nofollow" class="external text" href="https://doi.org/10.5194%2Fica-abs-6-264-2023">"GazePlotter – tool for eye movement sequences visualization"</a>. <i>Abstracts of the ICA</i>. <b>6</b>: 264–. 
<a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2023AbICA...6..264V">2023AbICA...6..264V</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.5194%2Fica-abs-6-264-2023">10.5194/ica-abs-6-264-2023</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2570-2106">2570-2106</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Abstracts+of+the+ICA&rft.atitle=GazePlotter+%E2%80%93+tool+for+eye+movement+sequences+visualization&rft.volume=6&rft.pages=264-&rft.date=2023-08-12&rft.issn=2570-2106&rft_id=info%3Adoi%2F10.5194%2Fica-abs-6-264-2023&rft_id=info%3Abibcode%2F2023AbICA...6..264V&rft.aulast=Vojtechovska&rft.aufirst=Michaela&rft.au=Popelka%2C+Stanislav&rft_id=https%3A%2F%2Fdoi.org%2F10.5194%252Fica-abs-6-264-2023&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-99"><span class="mw-cite-backlink"><b><a href="#cite_ref-99">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSultanPopelkaStrobl2022" class="citation journal cs1">Sultan, Minha Noor; Popelka, Stanislav; Strobl, Josef (24 June 2022). <a rel="nofollow" class="external text" href="https://dx.doi.org/10.1007/s12145-022-00832-5">"ET2Spatial – software for georeferencing of eye movement data"</a>. <i>Earth Science Informatics</i>. <b>15</b> (3): 2031–2049. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2022EScIn..15.2031S">2022EScIn..15.2031S</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs12145-022-00832-5">10.1007/s12145-022-00832-5</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1865-0473">1865-0473</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:249961269">249961269</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Earth+Science+Informatics&rft.atitle=ET2Spatial+%E2%80%93+software+for+georeferencing+of+eye+movement+data&rft.volume=15&rft.issue=3&rft.pages=2031-2049&rft.date=2022-06-24&rft_id=info%3Adoi%2F10.1007%2Fs12145-022-00832-5&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A249961269%23id-name%3DS2CID&rft.issn=1865-0473&rft_id=info%3Abibcode%2F2022EScIn..15.2031S&rft.aulast=Sultan&rft.aufirst=Minha+Noor&rft.au=Popelka%2C+Stanislav&rft.au=Strobl%2C+Josef&rft_id=http%3A%2F%2Fdx.doi.org%2F10.1007%2Fs12145-022-00832-5&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-100"><span class="mw-cite-backlink"><b><a href="#cite_ref-100">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGöbelKieferRaubal2019" class="citation journal cs1">Göbel, Fabian; Kiefer, Peter; Raubal, Martin (2 May 2019). <a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10707-019-00352-3">"Correction to: FeaturEyeTrack: automatic matching of eye tracking data with map features on interactive maps"</a>. <i>GeoInformatica</i>. <b>24</b> (4): 1061–1062. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10707-019-00352-3">10.1007/s10707-019-00352-3</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1384-6175">1384-6175</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:155184852">155184852</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=GeoInformatica&rft.atitle=Correction+to%3A+FeaturEyeTrack%3A+automatic+matching+of+eye+tracking+data+with+map+features+on+interactive+maps&rft.volume=24&rft.issue=4&rft.pages=1061-1062&rft.date=2019-05-02&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A155184852%23id-name%3DS2CID&rft.issn=1384-6175&rft_id=info%3Adoi%2F10.1007%2Fs10707-019-00352-3&rft.aulast=G%C3%B6bel&rft.aufirst=Fabian&rft.au=Kiefer%2C+Peter&rft.au=Raubal%2C+Martin&rft_id=https%3A%2F%2Fdoi.org%2F10.1007%252Fs10707-019-00352-3&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-101"><span class="mw-cite-backlink"><b><a href="#cite_ref-101">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDolezalovaPopelka2016" class="citation journal cs1">Dolezalova, Jitka; Popelka, Stanislav (5 August 2016). <a rel="nofollow" class="external text" href="https://bop.unibe.ch/JEMR/article/view/2522">"ScanGraph: A Novel Scanpath Comparison Method Using Visualisation of Graph Cliques"</a>. <i>Journal of Eye Movement Research</i>. <b>9</b> (4). 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.16910%2Fjemr.9.4.5">10.16910/jemr.9.4.5</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1995-8692">1995-8692</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Eye+Movement+Research&rft.atitle=ScanGraph%3A+A+Novel+Scanpath+Comparison+Method+Using+Visualisation+of+Graph+Cliques&rft.volume=9&rft.issue=4&rft.date=2016-08-05&rft_id=info%3Adoi%2F10.16910%2Fjemr.9.4.5&rft.issn=1995-8692&rft.aulast=Dolezalova&rft.aufirst=Jitka&rft.au=Popelka%2C+Stanislav&rft_id=https%3A%2F%2Fbop.unibe.ch%2FJEMR%2Farticle%2Fview%2F2522&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-102"><span class="mw-cite-backlink"><b><a href="#cite_ref-102">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKrassanakisFilippakopoulouNakos2014" class="citation journal cs1">Krassanakis, Vassilios; Filippakopoulou, Vassiliki; Nakos, Byron (21 February 2014). <a rel="nofollow" class="external text" href="https://doi.org/10.16910%2Fjemr.7.1.1">"EyeMMV toolbox: An eye movement post-analysis tool based on a two-step spatial dispersion threshold for fixation identification"</a>. <i>Journal of Eye Movement Research</i>. <b>7</b> (1). <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.16910%2Fjemr.7.1.1">10.16910/jemr.7.1.1</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1995-8692">1995-8692</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:38319871">38319871</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Eye+Movement+Research&rft.atitle=EyeMMV+toolbox%3A+An+eye+movement+post-analysis+tool+based+on+a+two-step+spatial+dispersion+threshold+for+fixation+identification&rft.volume=7&rft.issue=1&rft.date=2014-02-21&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A38319871%23id-name%3DS2CID&rft.issn=1995-8692&rft_id=info%3Adoi%2F10.16910%2Fjemr.7.1.1&rft.aulast=Krassanakis&rft.aufirst=Vassilios&rft.au=Filippakopoulou%2C+Vassiliki&rft.au=Nakos%2C+Byron&rft_id=https%3A%2F%2Fdoi.org%2F10.16910%252Fjemr.7.1.1&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-103"><span class="mw-cite-backlink"><b><a href="#cite_ref-103">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDickson2017" class="citation web cs1">Dickson, Ben (19 February 2017). <a rel="nofollow" class="external text" href="https://techcrunch.com/2017/02/19/unlocking-the-potential-of-eye-tracking-technology/">"Unlocking the potential of eye tracking technology"</a>. 
<i>TechCrunch</i><span class="reference-accessdate">. Retrieved <span class="nowrap">8 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=TechCrunch&rft.atitle=Unlocking+the+potential+of+eye+tracking+technology&rft.date=2017-02-19&rft.aulast=Dickson&rft.aufirst=Ben&rft_id=https%3A%2F%2Ftechcrunch.com%2F2017%2F02%2F19%2Funlocking-the-potential-of-eye-tracking-technology%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-104"><span class="mw-cite-backlink"><b><a href="#cite_ref-104">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFReddy2019" class="citation web cs1">Reddy, Venkateshwar (15 April 2019). <a rel="nofollow" class="external text" href="https://www.industryarc.com/PressRelease/838/Eye-Tracking-Market-Research.html">"Eye Tracking Technology: Applications & Future Scope"</a>. <i>IndustryARC</i><span class="reference-accessdate">. Retrieved <span class="nowrap">8 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=IndustryARC&rft.atitle=Eye+Tracking+Technology%3A+Applications+%26+Future+Scope&rft.date=2019-04-15&rft.aulast=Reddy&rft.aufirst=Venkateshwar&rft_id=https%3A%2F%2Fwww.industryarc.com%2FPressRelease%2F838%2FEye-Tracking-Market-Research.html&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-Rogers_2019-105"><span class="mw-cite-backlink"><b><a href="#cite_ref-Rogers_2019_105-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRogers2019" class="citation web cs1">Rogers, Sol (5 February 2019). <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/solrogers/2019/02/05/seven-reasons-why-eye-tracking-will-fundamentally-change-vr/">"Seven Reasons Why Eye-tracking Will Fundamentally Change VR"</a>. <i>Forbes</i><span class="reference-accessdate">. Retrieved <span class="nowrap">13 May</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=Forbes&rft.atitle=Seven+Reasons+Why+Eye-tracking+Will+Fundamentally+Change+VR&rft.date=2019-02-05&rft.aulast=Rogers&rft.aufirst=Sol&rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fsolrogers%2F2019%2F02%2F05%2Fseven-reasons-why-eye-tracking-will-fundamentally-change-vr%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-Stein_2020-106"><span class="mw-cite-backlink"><b><a href="#cite_ref-Stein_2020_106-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFStein2020" class="citation web cs1">Stein, Scott (31 January 2020). <a rel="nofollow" class="external text" href="https://www.cnet.com/news/eye-tracking-is-the-next-phase-for-vr-ready-or-not/">"Eye tracking is the next phase for VR, ready or not"</a>. <i>CNET</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=CNET&rft.atitle=Eye+tracking+is+the+next+phase+for+VR%2C+ready+or+not&rft.date=2020-01-31&rft.aulast=Stein&rft.aufirst=Scott&rft_id=https%3A%2F%2Fwww.cnet.com%2Fnews%2Feye-tracking-is-the-next-phase-for-vr-ready-or-not%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-107"><span class="mw-cite-backlink"><b><a href="#cite_ref-107">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFStanley2013" class="citation web cs1">Stanley, Jay (6 May 2013). <a rel="nofollow" class="external text" href="https://www.aclu.org/blog/national-security/privacy-and-surveillance/privacy-invading-potential-eye-tracking-technology">"The Privacy-Invading Potential of Eye Tracking Technology"</a>. <i>American Civil Liberties Union</i><span class="reference-accessdate">. Retrieved <span class="nowrap">8 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=American+Civil+Liberties+Union&rft.atitle=The+Privacy-Invading+Potential+of+Eye+Tracking+Technology&rft.date=2013-05-06&rft.aulast=Stanley&rft.aufirst=Jay&rft_id=https%3A%2F%2Fwww.aclu.org%2Fblog%2Fnational-security%2Fprivacy-and-surveillance%2Fprivacy-invading-potential-eye-tracking-technology&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-108"><span class="mw-cite-backlink"><b><a href="#cite_ref-108">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBlain2021" class="citation web cs1">Blain, Loz (29 March 2021). <a rel="nofollow" class="external text" href="https://newatlas.com/science/science/eye-tracking-privacy/">"Eye tracking can reveal an unbelievable amount of information about you"</a>. <i>New Atlas</i><span class="reference-accessdate">. Retrieved <span class="nowrap">8 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=unknown&rft.jtitle=New+Atlas&rft.atitle=Eye+tracking+can+reveal+an+unbelievable+amount+of+information+about+you&rft.date=2021-03-29&rft.aulast=Blain&rft.aufirst=Loz&rft_id=https%3A%2F%2Fnewatlas.com%2Fscience%2Fscience%2Feye-tracking-privacy%2F&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> <li id="cite_note-KrögerLutz2020-109"><span class="mw-cite-backlink">^ <a href="#cite_ref-KrögerLutz2020_109-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-KrögerLutz2020_109-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKrögerLutzMüller2020" class="citation book cs1">Kröger, Jacob Leon; Lutz, Otto Hans-Martin; Müller, Florian (2020). "What Does Your Gaze Reveal About You? On the Privacy Implications of Eye Tracking". <i>Privacy and Identity Management. Data for Better Living: AI and Privacy</i>. IFIP Advances in Information and Communication Technology. Vol. 576. Cham: Springer International Publishing. pp. 226–241. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-3-030-42504-3_15">10.1007/978-3-030-42504-3_15</a></span>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-3-030-42503-6" title="Special:BookSources/978-3-030-42503-6"><bdi>978-3-030-42503-6</bdi></a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a> <a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1868-4238">1868-4238</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=bookitem&rft.atitle=What+Does+Your+Gaze+Reveal+About+You%3F+On+the+Privacy+Implications+of+Eye+Tracking&rft.btitle=Privacy+and+Identity+Management.+Data+for+Better+Living%3A+AI+and+Privacy&rft.place=Cham&rft.series=IFIP+Advances+in+Information+and+Communication+Technology&rft.pages=226-241&rft.pub=Springer+International+Publishing&rft.date=2020&rft.issn=1868-4238&rft_id=info%3Adoi%2F10.1007%2F978-3-030-42504-3_15&rft.isbn=978-3-030-42503-6&rft.aulast=Kr%C3%B6ger&rft.aufirst=Jacob+Leon&rft.au=Lutz%2C+Otto+Hans-Martin&rft.au=M%C3%BCller%2C+Florian&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></span> </li> </ol></div></div> <div class="mw-heading mw-heading2"><h2 id="References">References</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=25" title="Edit section: References"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1235681985">.mw-parser-output .side-box{margin:4px 0;box-sizing:border-box;border:1px solid #aaa;font-size:88%;line-height:1.25em;background-color:var(--background-color-interactive-subtle,#f8f9fa);display:flow-root}.mw-parser-output .side-box-abovebelow,.mw-parser-output .side-box-text{padding:0.25em 0.9em}.mw-parser-output .side-box-image{padding:2px 0 2px 0.9em;text-align:center}.mw-parser-output .side-box-imageright{padding:2px 0.9em 2px 0;text-align:center}@media(min-width:500px){.mw-parser-output .side-box-flex{display:flex;align-items:center}.mw-parser-output .side-box-text{flex:1;min-width:0}}@media(min-width:720px){.mw-parser-output .side-box{width:238px}.mw-parser-output .side-box-right{clear:right;float:right;margin-left:1em}.mw-parser-output .side-box-left{margin-right:1em}}</style><style data-mw-deduplicate="TemplateStyles:r1237033735">@media print{body.ns-0 .mw-parser-output .sistersitebox{display:none!important}}@media screen{html.skin-theme-clientpref-night .mw-parser-output .sistersitebox img[src*="Wiktionary-logo-en-v2.svg"]{background-color:white}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .sistersitebox img[src*="Wiktionary-logo-en-v2.svg"]{background-color:white}}</style><div class="side-box side-box-right plainlinks sistersitebox"><style data-mw-deduplicate="TemplateStyles:r1126788409">.mw-parser-output .plainlist ol,.mw-parser-output .plainlist ul{line-height:inherit;list-style:none;margin:0;padding:0}.mw-parser-output .plainlist ol li,.mw-parser-output .plainlist ul li{margin-bottom:0}</style> <div class="side-box-flex"> <div class="side-box-image"><span class="noviewer" typeof="mw:File"><span><img alt="" 
src="//upload.wikimedia.org/wikipedia/en/thumb/4/4a/Commons-logo.svg/30px-Commons-logo.svg.png" decoding="async" width="30" height="40" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/4/4a/Commons-logo.svg/45px-Commons-logo.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/4/4a/Commons-logo.svg/59px-Commons-logo.svg.png 2x" data-file-width="1024" data-file-height="1376" /></span></span></div> <div class="side-box-text plainlist">Wikimedia Commons has media related to <span style="font-weight: bold; font-style: italic;"><a href="https://commons.wikimedia.org/wiki/Category:Eye_tracking" class="extiw" title="commons:Category:Eye tracking">Eye tracking</a></span>.</div></div> </div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1235681985"><div class="side-box metadata side-box-right"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1126788409"> <div class="side-box-flex"> <div class="side-box-image"><span class="noviewer" typeof="mw:File"><span><img alt="" src="//upload.wikimedia.org/wikipedia/commons/thumb/3/32/Scholia_logo.svg/40px-Scholia_logo.svg.png" decoding="async" width="40" height="39" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/3/32/Scholia_logo.svg/60px-Scholia_logo.svg.png 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/3/32/Scholia_logo.svg/80px-Scholia_logo.svg.png 2x" data-file-width="107" data-file-height="104" /></span></span></div> <div class="side-box-text plainlist"><a href="https://www.wikidata.org/wiki/Wikidata:Scholia" class="extiw" title="d:Wikidata:Scholia">Scholia</a> has a <i>topic</i> profile for <i><b><a href="https://iw.toolforge.org/scholia/topic/Q970687" class="extiw" title="toolforge:scholia/topic/Q970687">Eye tracking</a></b></i>.</div></div> </div> <ul><li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCornsweetCrane1973" class="citation journal cs1">Cornsweet, TN; Crane, HD (1973). <a rel="nofollow" class="external text" href="http://www.escholarship.org/uc/item/8hg953zz">"Accurate two-dimensional eye tracker using first and fourth Purkinje images"</a>. <i>J Opt Soc Am</i>. <b>63</b> (8): 921–8. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/1973JOSA...63..921C">1973JOSA...63..921C</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1364%2Fjosa.63.000921">10.1364/josa.63.000921</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/4722578">4722578</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:14866408">14866408</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=J+Opt+Soc+Am&rft.atitle=Accurate+two-dimensional+eye+tracker+using+first+and+fourth+Purkinje+images&rft.volume=63&rft.issue=8&rft.pages=921-8&rft.date=1973&rft_id=info%3Adoi%2F10.1364%2Fjosa.63.000921&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A14866408%23id-name%3DS2CID&rft_id=info%3Apmid%2F4722578&rft_id=info%3Abibcode%2F1973JOSA...63..921C&rft.aulast=Cornsweet&rft.aufirst=TN&rft.au=Crane%2C+HD&rft_id=http%3A%2F%2Fwww.escholarship.org%2Fuc%2Fitem%2F8hg953zz&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCornsweet1958" class="citation journal cs1"><a href="/wiki/Tom_N._Cornsweet" class="mw-redirect" title="Tom N. Cornsweet">Cornsweet, TN</a> (1958). <a rel="nofollow" class="external text" href="http://www.escholarship.org/uc/item/9kg0b3nf">"New technique for the measurement of small eye movements"</a>. <i>JOSA</i>. <b>48</b> (11): 808–811. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/1958JOSA...48..808C">1958JOSA...48..808C</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1364%2Fjosa.48.000808">10.1364/josa.48.000808</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/13588456">13588456</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=JOSA&rft.atitle=New+technique+for+the+measurement+of+small+eye+movements&rft.volume=48&rft.issue=11&rft.pages=808-811&rft.date=1958&rft_id=info%3Apmid%2F13588456&rft_id=info%3Adoi%2F10.1364%2Fjosa.48.000808&rft_id=info%3Abibcode%2F1958JOSA...48..808C&rft.aulast=Cornsweet&rft.aufirst=TN&rft_id=http%3A%2F%2Fwww.escholarship.org%2Fuc%2Fitem%2F9kg0b3nf&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHunziker2006" class="citation book cs1 cs1-prop-foreign-lang-source">Hunziker, Hans-Werner (2006). <i>Im Auge des Lesers: foveale und periphere Wahrnehmung – vom Buchstabieren zur Lesefreude</i> [<i>In the eye of the reader: foveal and peripheral perception – from letter recognition to the joy of reading</i>] (in German). Transmedia Stäubli Verlag Zürich. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-3-7266-0068-6" title="Special:BookSources/978-3-7266-0068-6"><bdi>978-3-7266-0068-6</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&rft.genre=book&rft.btitle=Im+Auge+des+Lesers%3A+foveale+und+periphere+Wahrnehmung+%E2%80%93+vom+Buchstabieren+zur+Lesefreude&rft.pub=Transmedia+St%C3%A4ubli+Verlag+Z%C3%BCrich&rft.date=2006&rft.isbn=978-3-7266-0068-6&rft.aulast=Hunziker&rft.aufirst=Hans-Werner&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJustCarpenter1980" class="citation journal cs1">Just, MA; Carpenter, PA (1980). <a rel="nofollow" class="external text" href="http://studentsuccess.aua.am/files/2013/10/A-Theory-of-Reading-From-Eye-Fixation-to-Comprehension-Just-and-Carpenter.pdf">"A theory of reading: from eye fixation to comprehension"</a> <span class="cs1-format">(PDF)</span>. <i>Psychol Rev</i>. <b>87</b> (4): 329–354. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1037%2F0033-295x.87.4.329">10.1037/0033-295x.87.4.329</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/7413885">7413885</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:3793521">3793521</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Psychol+Rev&rft.atitle=A+theory+of+reading%3A+from+eye+fixation+to+comprehension&rft.volume=87&rft.issue=4&rft.pages=329-354&rft.date=1980&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A3793521%23id-name%3DS2CID&rft_id=info%3Apmid%2F7413885&rft_id=info%3Adoi%2F10.1037%2F0033-295x.87.4.329&rft.aulast=Just&rft.aufirst=MA&rft.au=Carpenter%2C+PA&rft_id=http%3A%2F%2Fstudentsuccess.aua.am%2Ffiles%2F2013%2F10%2FA-Theory-of-Reading-From-Eye-Fixation-to-Comprehension-Just-and-Carpenter.pdf&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRayner1978" class="citation journal cs1">Rayner, K (1978). "Eye movements in reading and information processing". <i>Psychological Bulletin</i>. <b>85</b> (3): 618–660. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.294.4262">10.1.1.294.4262</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1037%2F0033-2909.85.3.618">10.1037/0033-2909.85.3.618</a>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/353867">353867</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Psychological+Bulletin&rft.atitle=Eye+movements+in+reading+and+information+processing&rft.volume=85&rft.issue=3&rft.pages=618-660&rft.date=1978&rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.294.4262%23id-name%3DCiteSeerX&rft_id=info%3Apmid%2F353867&rft_id=info%3Adoi%2F10.1037%2F0033-2909.85.3.618&rft.aulast=Rayner&rft.aufirst=K&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRayner1998" class="citation journal cs1">Rayner, K (1998). "Eye movements in reading and information processing: 20 years of research". <i>Psychological Bulletin</i>. <b>124</b> (3): 372–422. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.211.3546">10.1.1.211.3546</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1037%2F0033-2909.124.3.372">10.1037/0033-2909.124.3.372</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a> <a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/9849112">9849112</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Psychological+Bulletin&rft.atitle=Eye+movements+in+reading+and+information+processing%3A+20+years+of+research&rft.volume=124&rft.issue=3&rft.pages=372-422&rft.date=1998&rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.211.3546%23id-name%3DCiteSeerX&rft_id=info%3Apmid%2F9849112&rft_id=info%3Adoi%2F10.1037%2F0033-2909.124.3.372&rft.aulast=Rayner&rft.aufirst=K&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li>Romano Bergstrom, Jennifer (2014). Eye Tracking in User Experience Design. Morgan Kaufmann. <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-0-12-408138-3" title="Special:BookSources/978-0-12-408138-3">978-0-12-408138-3</a>.</li> <li>Bojko, Aga (2013). Eye Tracking The User Experience (A Practical Guide to Research). Rosenfeld Media. 
<link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a> <a href="/wiki/Special:BookSources/978-1-933820-10-1" title="Special:BookSources/978-1-933820-10-1">978-1-933820-10-1</a>.</li></ul> <div class="mw-heading mw-heading3"><h3 id="Commercial_eye_tracking">Commercial eye tracking</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Eye_tracking&action=edit&section=26" title="Edit section: Commercial eye tracking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <ul><li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPietersWedel2007" class="citation journal cs1">Pieters, R.; Wedel, M. (2007). "Goal Control of Visual Attention to Advertising: The Yarbus Implication". <i>Journal of Consumer Research</i>. <b>34</b> (2): 224–233. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.524.9550">10.1.1.524.9550</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1086%2F519150">10.1086/519150</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Consumer+Research&rft.atitle=Goal+Control+of+Visual+Attention+to+Advertising%3A+The+Yarbus+Implication&rft.volume=34&rft.issue=2&rft.pages=224-233&rft.date=2007&rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.524.9550%23id-name%3DCiteSeerX&rft_id=info%3Adoi%2F10.1086%2F519150&rft.aulast=Pieters&rft.aufirst=R.&rft.au=Wedel%2C+M.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li> <li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPietersWedel2004" class="citation journal cs1">Pieters, R.; Wedel, M. (2004). "Attention Capture and Transfer by elements of Advertisements". <i>Journal of Marketing</i>. <b>68</b> (2): 36–50. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a> <span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.115.3006">10.1.1.115.3006</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1509%2Fjmkg.68.2.36.27794">10.1509/jmkg.68.2.36.27794</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a> <a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:15259684">15259684</a>.</cite><span title="ctx_ver=Z39.88-2004&rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&rft.genre=article&rft.jtitle=Journal+of+Marketing&rft.atitle=Attention+Capture+and+Transfer+by+elements+of+Advertisements&rft.volume=68&rft.issue=2&rft.pages=36-50&rft.date=2004&rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.115.3006%23id-name%3DCiteSeerX&rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A15259684%23id-name%3DS2CID&rft_id=info%3Adoi%2F10.1509%2Fjmkg.68.2.36.27794&rft.aulast=Pieters&rft.aufirst=R.&rft.au=Wedel%2C+M.&rfr_id=info%3Asid%2Fen.wikipedia.org%3AEye+tracking" class="Z3988"></span></li></ul> <div class="navbox-styles"><style data-mw-deduplicate="TemplateStyles:r1129693374">.mw-parser-output .hlist dl,.mw-parser-output .hlist ol,.mw-parser-output .hlist ul{margin:0;padding:0}.mw-parser-output .hlist dd,.mw-parser-output .hlist dt,.mw-parser-output .hlist li{margin:0;display:inline}.mw-parser-output .hlist.inline,.mw-parser-output .hlist.inline dl,.mw-parser-output .hlist.inline ol,.mw-parser-output .hlist.inline ul,.mw-parser-output .hlist dl dl,.mw-parser-output .hlist dl ol,.mw-parser-output .hlist dl ul,.mw-parser-output .hlist ol dl,.mw-parser-output .hlist ol ol,.mw-parser-output .hlist ol ul,.mw-parser-output .hlist ul dl,.mw-parser-output .hlist ul ol,.mw-parser-output .hlist ul ul{display:inline}.mw-parser-output .hlist .mw-empty-li{display:none}.mw-parser-output .hlist dt::after{content:": "}.mw-parser-output .hlist dd::after,.mw-parser-output .hlist li::after{content:" · ";font-weight:bold}.mw-parser-output .hlist dd:last-child::after,.mw-parser-output .hlist dt:last-child::after,.mw-parser-output .hlist li:last-child::after{content:none}.mw-parser-output .hlist dd dd:first-child::before,.mw-parser-output .hlist dd dt:first-child::before,.mw-parser-output .hlist dd li:first-child::before,.mw-parser-output .hlist dt dd:first-child::before,.mw-parser-output .hlist dt dt:first-child::before,.mw-parser-output .hlist dt li:first-child::before,.mw-parser-output .hlist li dd:first-child::before,.mw-parser-output .hlist li dt:first-child::before,.mw-parser-output .hlist li li:first-child::before{content:" (";font-weight:normal}.mw-parser-output .hlist dd dd:last-child::after,.mw-parser-output .hlist dd dt:last-child::after,.mw-parser-output .hlist dd li:last-child::after,.mw-parser-output .hlist dt dd:last-child::after,.mw-parser-output .hlist dt dt:last-child::after,.mw-parser-output .hlist dt li:last-child::after,.mw-parser-output .hlist li dd:last-child::after,.mw-parser-output .hlist li dt:last-child::after,.mw-parser-output .hlist li li:last-child::after{content:")";font-weight:normal}.mw-parser-output .hlist ol{counter-reset:listitem}.mw-parser-output .hlist ol>li{counter-increment:listitem}.mw-parser-output .hlist ol>li::before{content:" "counter(listitem)"\a0 "}.mw-parser-output .hlist dd ol>li:first-child::before,.mw-parser-output .hlist dt ol>li:first-child::before,.mw-parser-output .hlist li ol>li:first-child::before{content:" ("counter(listitem)"\a0 "}</style><style data-mw-deduplicate="TemplateStyles:r1236075235">.mw-parser-output .navbox{box-sizing:border-box;border:1px solid #a2a9b1;width:100%;clear:both;font-size:88%;text-align:center;padding:1px;margin:1em auto 0}.mw-parser-output .navbox 
Categories: Attention · Cognitive science · Human eye · History of human–computer interaction · Market research · Multimodal interaction · Promotion and marketing communications · Usability · Vision · Web design · Applications of computer vision · Virtual reality