CINXE.COM

Emotion recognition - Wikipedia

<!DOCTYPE html> <html class="client-nojs vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-sticky-header-enabled vector-toc-available" lang="en" dir="ltr"> <head> <meta charset="UTF-8"> <title>Emotion recognition - Wikipedia</title> <script>(function(){var className="client-js vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-sticky-header-enabled vector-toc-available";var cookie=document.cookie.match(/(?:^|; )enwikimwclientpreferences=([^;]+)/);if(cookie){cookie[1].split('%2C').forEach(function(pref){className=className.replace(new RegExp('(^| )'+pref.replace(/-clientpref-\w+$|[^\w-]+/g,'')+'-clientpref-\\w+( |$)'),'$1'+pref+'$2');});}document.documentElement.className=className;}());RLCONF={"wgBreakFrames":false,"wgSeparatorTransformTable":["",""],"wgDigitTransformTable":["",""],"wgDefaultDateFormat":"dmy","wgMonthNames":["","January","February","March","April","May","June","July","August","September","October","November","December"],"wgRequestId":"e1b2fd2e-5fb4-4a6d-a33f-e0911741b20e","wgCanonicalNamespace":"","wgCanonicalSpecialPageName":false,"wgNamespaceNumber":0,"wgPageName":"Emotion_recognition","wgTitle":"Emotion 
recognition","wgCurRevisionId":1277684962,"wgRevisionId":1277684962,"wgArticleId":48198256,"wgIsArticle":true,"wgIsRedirect":false,"wgAction":"view","wgUserName":null,"wgUserGroups":["*"],"wgCategories":["CS1: long volume value","Articles with short description","Short description is different from Wikidata","Use dmy dates from August 2016","All articles with unsourced statements","Articles with unsourced statements from September 2019","Webarchive template wayback links","Articles with unsourced statements from February 2020","Articles containing Welsh-language text","Articles containing Danish-language text","Articles containing Japanese-language text","Articles containing Portuguese-language text","Articles containing German-language text","Articles with excerpts","Emotion","Applications of artificial intelligence","Affective computing"],"wgPageViewLanguage":"en","wgPageContentLanguage":"en","wgPageContentModel":"wikitext","wgRelevantPageName":"Emotion_recognition","wgRelevantArticleId":48198256,"wgIsProbablyEditable":true,"wgRelevantPageIsProbablyEditable":true,"wgRestrictionEdit":[],"wgRestrictionMove":[],"wgNoticeProject":"wikipedia","wgCiteReferencePreviewsActive":false,"wgFlaggedRevsParams":{"tags":{"status":{"levels":1}}},"wgMediaViewerOnClick":true,"wgMediaViewerEnabledByDefault":true,"wgPopupsFlags":0,"wgVisualEditor":{"pageLanguageCode":"en","pageLanguageDir":"ltr","pageVariantFallbacks":"en"},"wgMFDisplayWikibaseDescriptions":{"search":true,"watchlist":true,"tagline":false,"nearby":true},"wgWMESchemaEditAttemptStepOversample":false,"wgWMEPageLength":30000,"wgEditSubmitButtonLabelPublish":true,"wgULSPosition":"interlanguage","wgULSisCompactLinksEnabled":false,"wgVector2022LanguageInHeader":true,"wgULSisLanguageSelectorEmpty":false,"wgWikibaseItemId":"Q1339090","wgCheckUserClientHintsHeadersJsApi":["brands","architecture","bitness","fullVersionList","mobile","model","platform","platformVersion"],"GEHomepageSuggestedEditsEnableTopics":true,"wgGETopicsMatch
ModeEnabled":false,"wgGELevelingUpEnabledForUser":false}; RLSTATE={"ext.globalCssJs.user.styles":"ready","site.styles":"ready","user.styles":"ready","ext.globalCssJs.user":"ready","user":"ready","user.options":"loading","ext.cite.styles":"ready","skins.vector.search.codex.styles":"ready","skins.vector.styles":"ready","skins.vector.icons":"ready","jquery.makeCollapsible.styles":"ready","ext.wikimediamessages.styles":"ready","ext.visualEditor.desktopArticleTarget.noscript":"ready","ext.uls.interlanguage":"ready","wikibase.client.init":"ready"};RLPAGEMODULES=["ext.cite.ux-enhancements","site","mediawiki.page.ready","jquery.makeCollapsible","mediawiki.toc","skins.vector.js","ext.centralNotice.geoIP","ext.centralNotice.startUp","ext.gadget.ReferenceTooltips","ext.gadget.switcher","ext.urlShortener.toolbar","ext.centralauth.centralautologin","mmv.bootstrap","ext.popups","ext.visualEditor.desktopArticleTarget.init","ext.visualEditor.targetLoader","ext.echo.centralauth","ext.eventLogging","ext.wikimediaEvents","ext.navigationTiming","ext.uls.interface","ext.cx.eventlogging.campaigns","ext.cx.uls.quick.actions","wikibase.client.vector-2022","ext.checkUser.clientHints","ext.growthExperiments.SuggestedEditSession"];</script> <script>(RLQ=window.RLQ||[]).push(function(){mw.loader.impl(function(){return["user.options@12s5i",function($,jQuery,require,module){mw.user.tokens.set({"patrolToken":"+\\","watchToken":"+\\","csrfToken":"+\\"}); }];});});</script> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=ext.cite.styles%7Cext.uls.interlanguage%7Cext.visualEditor.desktopArticleTarget.noscript%7Cext.wikimediamessages.styles%7Cjquery.makeCollapsible.styles%7Cskins.vector.icons%2Cstyles%7Cskins.vector.search.codex.styles%7Cwikibase.client.init&amp;only=styles&amp;skin=vector-2022"> <script async="" src="/w/load.php?lang=en&amp;modules=startup&amp;only=scripts&amp;raw=1&amp;skin=vector-2022"></script> <meta name="ResourceLoaderDynamicStyles" content=""> <link 
rel="stylesheet" href="/w/load.php?lang=en&amp;modules=site.styles&amp;only=styles&amp;skin=vector-2022"> <meta name="generator" content="MediaWiki 1.44.0-wmf.21"> <meta name="referrer" content="origin"> <meta name="referrer" content="origin-when-cross-origin"> <meta name="robots" content="max-image-preview:standard"> <meta name="format-detection" content="telephone=no"> <meta name="viewport" content="width=1120"> <meta property="og:title" content="Emotion recognition - Wikipedia"> <meta property="og:type" content="website"> <link rel="preconnect" href="//upload.wikimedia.org"> <link rel="alternate" media="only screen and (max-width: 640px)" href="//en.m.wikipedia.org/wiki/Emotion_recognition"> <link rel="alternate" type="application/x-wiki" title="Edit this page" href="/w/index.php?title=Emotion_recognition&amp;action=edit"> <link rel="apple-touch-icon" href="/static/apple-touch/wikipedia.png"> <link rel="icon" href="/static/favicon/wikipedia.ico"> <link rel="search" type="application/opensearchdescription+xml" href="/w/rest.php/v1/search" title="Wikipedia (en)"> <link rel="EditURI" type="application/rsd+xml" href="//en.wikipedia.org/w/api.php?action=rsd"> <link rel="canonical" href="https://en.wikipedia.org/wiki/Emotion_recognition"> <link rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/deed.en"> <link rel="alternate" type="application/atom+xml" title="Wikipedia Atom feed" href="/w/index.php?title=Special:RecentChanges&amp;feed=atom"> <link rel="dns-prefetch" href="//meta.wikimedia.org" /> <link rel="dns-prefetch" href="login.wikimedia.org"> </head> <body class="skin--responsive skin-vector skin-vector-search-vue mediawiki ltr sitedir-ltr mw-hide-empty-elt ns-0 ns-subject mw-editable page-Emotion_recognition rootpage-Emotion_recognition skin-vector-2022 action-view"><a class="mw-jump-link" href="#bodyContent">Jump to content</a> <div class="vector-header-container"> <header class="vector-header mw-header"> <div class="vector-header-start"> <nav 
class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-dropdown" class="vector-dropdown vector-main-menu-dropdown vector-button-flush-left vector-button-flush-right" title="Main menu" > <input type="checkbox" id="vector-main-menu-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-main-menu-dropdown" class="vector-dropdown-checkbox " aria-label="Main menu" > <label id="vector-main-menu-dropdown-label" for="vector-main-menu-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-menu mw-ui-icon-wikimedia-menu"></span> <span class="vector-dropdown-label-text">Main menu</span> </label> <div class="vector-dropdown-content"> <div id="vector-main-menu-unpinned-container" class="vector-unpinned-container"> <div id="vector-main-menu" class="vector-main-menu vector-pinnable-element"> <div class="vector-pinnable-header vector-main-menu-pinnable-header vector-pinnable-header-unpinned" data-feature-name="main-menu-pinned" data-pinnable-element-id="vector-main-menu" data-pinned-container-id="vector-main-menu-pinned-container" data-unpinned-container-id="vector-main-menu-unpinned-container" > <div class="vector-pinnable-header-label">Main menu</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-main-menu.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-main-menu.unpin">hide</button> </div> <div id="p-navigation" class="vector-menu mw-portlet mw-portlet-navigation" > <div class="vector-menu-heading"> Navigation </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-mainpage-description" class="mw-list-item"><a href="/wiki/Main_Page" 
title="Visit the main page [z]" accesskey="z"><span>Main page</span></a></li><li id="n-contents" class="mw-list-item"><a href="/wiki/Wikipedia:Contents" title="Guides to browsing Wikipedia"><span>Contents</span></a></li><li id="n-currentevents" class="mw-list-item"><a href="/wiki/Portal:Current_events" title="Articles related to current events"><span>Current events</span></a></li><li id="n-randompage" class="mw-list-item"><a href="/wiki/Special:Random" title="Visit a randomly selected article [x]" accesskey="x"><span>Random article</span></a></li><li id="n-aboutsite" class="mw-list-item"><a href="/wiki/Wikipedia:About" title="Learn about Wikipedia and how it works"><span>About Wikipedia</span></a></li><li id="n-contactpage" class="mw-list-item"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us" title="How to contact Wikipedia"><span>Contact us</span></a></li> </ul> </div> </div> <div id="p-interaction" class="vector-menu mw-portlet mw-portlet-interaction" > <div class="vector-menu-heading"> Contribute </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-help" class="mw-list-item"><a href="/wiki/Help:Contents" title="Guidance on how to use and edit Wikipedia"><span>Help</span></a></li><li id="n-introduction" class="mw-list-item"><a href="/wiki/Help:Introduction" title="Learn how to edit Wikipedia"><span>Learn to edit</span></a></li><li id="n-portal" class="mw-list-item"><a href="/wiki/Wikipedia:Community_portal" title="The hub for editors"><span>Community portal</span></a></li><li id="n-recentchanges" class="mw-list-item"><a href="/wiki/Special:RecentChanges" title="A list of recent changes to Wikipedia [r]" accesskey="r"><span>Recent changes</span></a></li><li id="n-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_upload_wizard" title="Add images or other media for use on Wikipedia"><span>Upload file</span></a></li><li id="n-specialpages" class="mw-list-item"><a href="/wiki/Special:SpecialPages"><span>Special 
pages</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> <a href="/wiki/Main_Page" class="mw-logo"> <img class="mw-logo-icon" src="/static/images/icons/wikipedia.png" alt="" aria-hidden="true" height="50" width="50"> <span class="mw-logo-container skin-invert"> <img class="mw-logo-wordmark" alt="Wikipedia" src="/static/images/mobile/copyright/wikipedia-wordmark-en.svg" style="width: 7.5em; height: 1.125em;"> <img class="mw-logo-tagline" alt="The Free Encyclopedia" src="/static/images/mobile/copyright/wikipedia-tagline-en.svg" width="117" height="13" style="width: 7.3125em; height: 0.8125em;"> </span> </a> </div> <div class="vector-header-end"> <div id="p-search" role="search" class="vector-search-box-vue vector-search-box-collapses vector-search-box-show-thumbnail vector-search-box-auto-expand-width vector-search-box"> <a href="/wiki/Special:Search" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only search-toggle" title="Search Wikipedia [f]" accesskey="f"><span class="vector-icon mw-ui-icon-search mw-ui-icon-wikimedia-search"></span> <span>Search</span> </a> <div class="vector-typeahead-search-container"> <div class="cdx-typeahead-search cdx-typeahead-search--show-thumbnail cdx-typeahead-search--auto-expand-width"> <form action="/w/index.php" id="searchform" class="cdx-search-input cdx-search-input--has-end-button"> <div id="simpleSearch" class="cdx-search-input__input-wrapper" data-search-loc="header-moved"> <div class="cdx-text-input cdx-text-input--has-start-icon"> <input class="cdx-text-input__input" type="search" name="search" placeholder="Search Wikipedia" aria-label="Search Wikipedia" autocapitalize="sentences" title="Search Wikipedia [f]" accesskey="f" id="searchInput" > <span class="cdx-text-input__icon cdx-text-input__start-icon"></span> </div> <input type="hidden" name="title" value="Special:Search"> </div> <button class="cdx-button 
cdx-search-input__end-button">Search</button> </form> </div> </div> </div> <nav class="vector-user-links vector-user-links-wide" aria-label="Personal tools"> <div class="vector-user-links-main"> <div id="p-vector-user-menu-preferences" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-userpage" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-dropdown" class="vector-dropdown " title="Change the appearance of the page&#039;s font size, width, and color" > <input type="checkbox" id="vector-appearance-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-appearance-dropdown" class="vector-dropdown-checkbox " aria-label="Appearance" > <label id="vector-appearance-dropdown-label" for="vector-appearance-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-appearance mw-ui-icon-wikimedia-appearance"></span> <span class="vector-dropdown-label-text">Appearance</span> </label> <div class="vector-dropdown-content"> <div id="vector-appearance-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <div id="p-vector-user-menu-notifications" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-overflow" class="vector-menu mw-portlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" 
href="https://donate.wikimedia.org/?wmf_source=donate&amp;wmf_medium=sidebar&amp;wmf_campaign=en.wikipedia.org&amp;uselang=en" class=""><span>Donate</span></a> </li> <li id="pt-createaccount-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:CreateAccount&amp;returnto=Emotion+recognition" title="You are encouraged to create an account and log in; however, it is not mandatory" class=""><span>Create account</span></a> </li> <li id="pt-login-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:UserLogin&amp;returnto=Emotion+recognition" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. [o]" accesskey="o" class=""><span>Log in</span></a> </li> </ul> </div> </div> </div> <div id="vector-user-links-dropdown" class="vector-dropdown vector-user-menu vector-button-flush-right vector-user-menu-logged-out" title="Log in and more options" > <input type="checkbox" id="vector-user-links-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-user-links-dropdown" class="vector-dropdown-checkbox " aria-label="Personal tools" > <label id="vector-user-links-dropdown-label" for="vector-user-links-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-ellipsis mw-ui-icon-wikimedia-ellipsis"></span> <span class="vector-dropdown-label-text">Personal tools</span> </label> <div class="vector-dropdown-content"> <div id="p-personal" class="vector-menu mw-portlet mw-portlet-personal user-links-collapsible-item" title="User menu" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport" class="user-links-collapsible-item mw-list-item"><a 
href="https://donate.wikimedia.org/?wmf_source=donate&amp;wmf_medium=sidebar&amp;wmf_campaign=en.wikipedia.org&amp;uselang=en"><span>Donate</span></a></li><li id="pt-createaccount" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:CreateAccount&amp;returnto=Emotion+recognition" title="You are encouraged to create an account and log in; however, it is not mandatory"><span class="vector-icon mw-ui-icon-userAdd mw-ui-icon-wikimedia-userAdd"></span> <span>Create account</span></a></li><li id="pt-login" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:UserLogin&amp;returnto=Emotion+recognition" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. [o]" accesskey="o"><span class="vector-icon mw-ui-icon-logIn mw-ui-icon-wikimedia-logIn"></span> <span>Log in</span></a></li> </ul> </div> </div> <div id="p-user-menu-anon-editor" class="vector-menu mw-portlet mw-portlet-user-menu-anon-editor" > <div class="vector-menu-heading"> Pages for logged out editors <a href="/wiki/Help:Introduction" aria-label="Learn more about editing"><span>learn more</span></a> </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-anoncontribs" class="mw-list-item"><a href="/wiki/Special:MyContributions" title="A list of edits made from this IP address [y]" accesskey="y"><span>Contributions</span></a></li><li id="pt-anontalk" class="mw-list-item"><a href="/wiki/Special:MyTalk" title="Discussion about edits from this IP address [n]" accesskey="n"><span>Talk</span></a></li> </ul> </div> </div> </div> </div> </nav> </div> </header> </div> <div class="mw-page-container"> <div class="mw-page-container-inner"> <div class="vector-sitenotice-container"> <div id="siteNotice"><!-- CentralNotice --></div> </div> <div class="vector-column-start"> <div class="vector-main-menu-container"> <div id="mw-navigation"> <nav id="mw-panel" class="vector-main-menu-landmark" aria-label="Site"> 
<div id="vector-main-menu-pinned-container" class="vector-pinned-container"> </div> </nav> </div> </div> <div class="vector-sticky-pinned-container"> <nav id="mw-panel-toc" aria-label="Contents" data-event-name="ui.sidebar-toc" class="mw-table-of-contents-container vector-toc-landmark"> <div id="vector-toc-pinned-container" class="vector-pinned-container"> <div id="vector-toc" class="vector-toc vector-pinnable-element"> <div class="vector-pinnable-header vector-toc-pinnable-header vector-pinnable-header-pinned" data-feature-name="toc-pinned" data-pinnable-element-id="vector-toc" > <h2 class="vector-pinnable-header-label">Contents</h2> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-toc.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-toc.unpin">hide</button> </div> <ul class="vector-toc-contents" id="mw-panel-toc-list"> <li id="toc-mw-content-text" class="vector-toc-list-item vector-toc-level-1"> <a href="#" class="vector-toc-link"> <div class="vector-toc-text">(Top)</div> </a> </li> <li id="toc-Human" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Human"> <div class="vector-toc-text"> <span class="vector-toc-numb">1</span> <span>Human</span> </div> </a> <ul id="toc-Human-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Automatic" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Automatic"> <div class="vector-toc-text"> <span class="vector-toc-numb">2</span> <span>Automatic</span> </div> </a> <button aria-controls="toc-Automatic-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Automatic subsection</span> </button> <ul 
id="toc-Automatic-sublist" class="vector-toc-list"> <li id="toc-Approaches" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Approaches"> <div class="vector-toc-text"> <span class="vector-toc-numb">2.1</span> <span>Approaches</span> </div> </a> <ul id="toc-Approaches-sublist" class="vector-toc-list"> <li id="toc-Knowledge-based_techniques" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Knowledge-based_techniques"> <div class="vector-toc-text"> <span class="vector-toc-numb">2.1.1</span> <span>Knowledge-based techniques</span> </div> </a> <ul id="toc-Knowledge-based_techniques-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Statistical_methods" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Statistical_methods"> <div class="vector-toc-text"> <span class="vector-toc-numb">2.1.2</span> <span>Statistical methods</span> </div> </a> <ul id="toc-Statistical_methods-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Hybrid_approaches" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Hybrid_approaches"> <div class="vector-toc-text"> <span class="vector-toc-numb">2.1.3</span> <span>Hybrid approaches</span> </div> </a> <ul id="toc-Hybrid_approaches-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Datasets" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Datasets"> <div class="vector-toc-text"> <span class="vector-toc-numb">2.2</span> <span>Datasets</span> </div> </a> <ul id="toc-Datasets-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Applications" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Applications"> <div class="vector-toc-text"> <span class="vector-toc-numb">2.3</span> <span>Applications</span> </div> </a> <ul id="toc-Applications-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Subfields" 
class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Subfields"> <div class="vector-toc-text"> <span class="vector-toc-numb">3</span> <span>Subfields</span> </div> </a> <button aria-controls="toc-Subfields-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Subfields subsection</span> </button> <ul id="toc-Subfields-sublist" class="vector-toc-list"> <li id="toc-Emotion_recognition_in_text" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Emotion_recognition_in_text"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.1</span> <span>Emotion recognition in text</span> </div> </a> <ul id="toc-Emotion_recognition_in_text-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Emotion_recognition_in_audio" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Emotion_recognition_in_audio"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.2</span> <span>Emotion recognition in audio</span> </div> </a> <ul id="toc-Emotion_recognition_in_audio-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Emotion_recognition_in_video" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Emotion_recognition_in_video"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.3</span> <span>Emotion recognition in video</span> </div> </a> <ul id="toc-Emotion_recognition_in_video-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Emotion_recognition_in_conversation" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Emotion_recognition_in_conversation"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.4</span> <span>Emotion recognition in conversation</span> </div> </a> <ul id="toc-Emotion_recognition_in_conversation-sublist" 
class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-See_also" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#See_also"> <div class="vector-toc-text"> <span class="vector-toc-numb">4</span> <span>See also</span> </div> </a> <ul id="toc-See_also-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-References" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#References"> <div class="vector-toc-text"> <span class="vector-toc-numb">5</span> <span>References</span> </div> </a> <ul id="toc-References-sublist" class="vector-toc-list"> </ul> </li> </ul> </div> </div> </nav> </div> </div> <div class="mw-content-container"> <main id="content" class="mw-body"> <header class="mw-body-header vector-page-titlebar"> <nav aria-label="Contents" class="vector-toc-landmark"> <div id="vector-page-titlebar-toc" class="vector-dropdown vector-page-titlebar-toc vector-button-flush-left" title="Table of Contents" > <input type="checkbox" id="vector-page-titlebar-toc-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-titlebar-toc" class="vector-dropdown-checkbox " aria-label="Toggle the table of contents" > <label id="vector-page-titlebar-toc-label" for="vector-page-titlebar-toc-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-listBullet mw-ui-icon-wikimedia-listBullet"></span> <span class="vector-dropdown-label-text">Toggle the table of contents</span> </label> <div class="vector-dropdown-content"> <div id="vector-page-titlebar-toc-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <h1 id="firstHeading" class="firstHeading mw-first-heading"><span class="mw-page-title-main">Emotion recognition</span></h1> <div 
id="p-lang-btn" class="vector-dropdown mw-portlet mw-portlet-lang" > <input type="checkbox" id="p-lang-btn-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-p-lang-btn" class="vector-dropdown-checkbox mw-interlanguage-selector" aria-label="Go to an article in another language. Available in 6 languages" > <label id="p-lang-btn-label" for="p-lang-btn-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--action-progressive mw-portlet-lang-heading-6" aria-hidden="true" ><span class="vector-icon mw-ui-icon-language-progressive mw-ui-icon-wikimedia-language-progressive"></span> <span class="vector-dropdown-label-text">6 languages</span> </label> <div class="vector-dropdown-content"> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li class="interlanguage-link interwiki-ar mw-list-item"><a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B9%D8%B1%D9%81_%D8%B9%D9%84%D9%89_%D8%A7%D9%84%D9%85%D8%B4%D8%A7%D8%B9%D8%B1" title="تعرف على المشاعر – Arabic" lang="ar" hreflang="ar" data-title="تعرف على المشاعر" data-language-autonym="العربية" data-language-local-name="Arabic" class="interlanguage-link-target"><span>العربية</span></a></li><li class="interlanguage-link interwiki-de mw-list-item"><a href="https://de.wikipedia.org/wiki/Emotionserkennung" title="Emotionserkennung – German" lang="de" hreflang="de" data-title="Emotionserkennung" data-language-autonym="Deutsch" data-language-local-name="German" class="interlanguage-link-target"><span>Deutsch</span></a></li><li class="interlanguage-link interwiki-fa mw-list-item"><a href="https://fa.wikipedia.org/wiki/%D8%AA%D8%B4%D8%AE%DB%8C%D8%B5_%D8%B9%D9%88%D8%A7%D8%B7%D9%81_(%D8%A7%D8%AD%D8%B3%D8%A7%D8%B3%D8%A7%D8%AA)" title="تشخیص عواطف (احساسات) – Persian" lang="fa" hreflang="fa" data-title="تشخیص عواطف (احساسات)" data-language-autonym="فارسی" data-language-local-name="Persian" 
class="interlanguage-link-target"><span>فارسی</span></a></li><li class="interlanguage-link interwiki-fr mw-list-item"><a href="https://fr.wikipedia.org/wiki/Reconnaissance_automatique_des_%C3%A9motions" title="Reconnaissance automatique des émotions – French" lang="fr" hreflang="fr" data-title="Reconnaissance automatique des émotions" data-language-autonym="Français" data-language-local-name="French" class="interlanguage-link-target"><span>Français</span></a></li><li class="interlanguage-link interwiki-ja mw-list-item"><a href="https://ja.wikipedia.org/wiki/%E6%84%9F%E6%83%85%E8%AA%8D%E8%AD%98" title="感情認識 – Japanese" lang="ja" hreflang="ja" data-title="感情認識" data-language-autonym="日本語" data-language-local-name="Japanese" class="interlanguage-link-target"><span>日本語</span></a></li><li class="interlanguage-link interwiki-uk mw-list-item"><a href="https://uk.wikipedia.org/wiki/%D0%A0%D0%BE%D0%B7%D0%BF%D1%96%D0%B7%D0%BD%D0%B0%D0%B2%D0%B0%D0%BD%D0%BD%D1%8F_%D0%B5%D0%BC%D0%BE%D1%86%D1%96%D0%B9" title="Розпізнавання емоцій – Ukrainian" lang="uk" hreflang="uk" data-title="Розпізнавання емоцій" data-language-autonym="Українська" data-language-local-name="Ukrainian" class="interlanguage-link-target"><span>Українська</span></a></li> </ul> <div class="after-portlet after-portlet-lang"><span class="wb-langlinks-edit wb-langlinks-link"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q1339090#sitelinks-wikipedia" title="Edit interlanguage links" class="wbc-editpage">Edit links</a></span></div> </div> </div> </div> </header> <div class="vector-page-toolbar"> <div class="vector-page-toolbar-container"> <div id="left-navigation"> <nav aria-label="Namespaces"> <div id="p-associated-pages" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-associated-pages" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-nstab-main" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/Emotion_recognition" title="View the content page [c]" 
accesskey="c"><span>Article</span></a></li><li id="ca-talk" class="vector-tab-noicon mw-list-item"><a href="/wiki/Talk:Emotion_recognition" rel="discussion" title="Discuss improvements to the content page [t]" accesskey="t"><span>Talk</span></a></li> </ul> </div> </div> <div id="vector-variants-dropdown" class="vector-dropdown emptyPortlet" > <input type="checkbox" id="vector-variants-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-variants-dropdown" class="vector-dropdown-checkbox " aria-label="Change language variant" > <label id="vector-variants-dropdown-label" for="vector-variants-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">English</span> </label> <div class="vector-dropdown-content"> <div id="p-variants" class="vector-menu mw-portlet mw-portlet-variants emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> </div> </div> </nav> </div> <div id="right-navigation" class="vector-collapsible"> <nav aria-label="Views"> <div id="p-views" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-views" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-view" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/Emotion_recognition"><span>Read</span></a></li><li id="ca-edit" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=Emotion_recognition&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-history" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=Emotion_recognition&amp;action=history" title="Past revisions of this page [h]" accesskey="h"><span>View history</span></a></li> </ul> </div> </div> </nav> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div 
id="vector-page-tools-dropdown" class="vector-dropdown vector-page-tools-dropdown" > <input type="checkbox" id="vector-page-tools-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-tools-dropdown" class="vector-dropdown-checkbox " aria-label="Tools" > <label id="vector-page-tools-dropdown-label" for="vector-page-tools-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">Tools</span> </label> <div class="vector-dropdown-content"> <div id="vector-page-tools-unpinned-container" class="vector-unpinned-container"> <div id="vector-page-tools" class="vector-page-tools vector-pinnable-element"> <div class="vector-pinnable-header vector-page-tools-pinnable-header vector-pinnable-header-unpinned" data-feature-name="page-tools-pinned" data-pinnable-element-id="vector-page-tools" data-pinned-container-id="vector-page-tools-pinned-container" data-unpinned-container-id="vector-page-tools-unpinned-container" > <div class="vector-pinnable-header-label">Tools</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-page-tools.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-page-tools.unpin">hide</button> </div> <div id="p-cactions" class="vector-menu mw-portlet mw-portlet-cactions emptyPortlet vector-has-collapsible-items" title="More options" > <div class="vector-menu-heading"> Actions </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-more-view" class="selected vector-more-collapsible-item mw-list-item"><a href="/wiki/Emotion_recognition"><span>Read</span></a></li><li id="ca-more-edit" class="vector-more-collapsible-item mw-list-item"><a 
href="/w/index.php?title=Emotion_recognition&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-more-history" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=Emotion_recognition&amp;action=history"><span>View history</span></a></li> </ul> </div> </div> <div id="p-tb" class="vector-menu mw-portlet mw-portlet-tb" > <div class="vector-menu-heading"> General </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-whatlinkshere" class="mw-list-item"><a href="/wiki/Special:WhatLinksHere/Emotion_recognition" title="List of all English Wikipedia pages containing links to this page [j]" accesskey="j"><span>What links here</span></a></li><li id="t-recentchangeslinked" class="mw-list-item"><a href="/wiki/Special:RecentChangesLinked/Emotion_recognition" rel="nofollow" title="Recent changes in pages linked from this page [k]" accesskey="k"><span>Related changes</span></a></li><li id="t-upload" class="mw-list-item"><a href="//en.wikipedia.org/wiki/Wikipedia:File_Upload_Wizard" title="Upload files [u]" accesskey="u"><span>Upload file</span></a></li><li id="t-permalink" class="mw-list-item"><a href="/w/index.php?title=Emotion_recognition&amp;oldid=1277684962" title="Permanent link to this revision of this page"><span>Permanent link</span></a></li><li id="t-info" class="mw-list-item"><a href="/w/index.php?title=Emotion_recognition&amp;action=info" title="More information about this page"><span>Page information</span></a></li><li id="t-cite" class="mw-list-item"><a href="/w/index.php?title=Special:CiteThisPage&amp;page=Emotion_recognition&amp;id=1277684962&amp;wpFormIdentifier=titleform" title="Information on how to cite this page"><span>Cite this page</span></a></li><li id="t-urlshortener" class="mw-list-item"><a href="/w/index.php?title=Special:UrlShortener&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FEmotion_recognition"><span>Get shortened URL</span></a></li><li 
id="t-urlshortener-qrcode" class="mw-list-item"><a href="/w/index.php?title=Special:QrCode&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FEmotion_recognition"><span>Download QR code</span></a></li> </ul> </div> </div> <div id="p-coll-print_export" class="vector-menu mw-portlet mw-portlet-coll-print_export" > <div class="vector-menu-heading"> Print/export </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="coll-download-as-rl" class="mw-list-item"><a href="/w/index.php?title=Special:DownloadAsPdf&amp;page=Emotion_recognition&amp;action=show-download-screen" title="Download this page as a PDF file"><span>Download as PDF</span></a></li><li id="t-print" class="mw-list-item"><a href="/w/index.php?title=Emotion_recognition&amp;printable=yes" title="Printable version of this page [p]" accesskey="p"><span>Printable version</span></a></li> </ul> </div> </div> <div id="p-wikibase-otherprojects" class="vector-menu mw-portlet mw-portlet-wikibase-otherprojects" > <div class="vector-menu-heading"> In other projects </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-wikibase" class="wb-otherproject-link wb-otherproject-wikibase-dataitem mw-list-item"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q1339090" title="Structured data on this page hosted by Wikidata [g]" accesskey="g"><span>Wikidata item</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> </div> </div> </div> <div class="vector-column-end"> <div class="vector-sticky-pinned-container"> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-pinned-container" class="vector-pinned-container"> </div> </nav> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-pinned-container" class="vector-pinned-container"> <div id="vector-appearance" class="vector-appearance vector-pinnable-element"> <div class="vector-pinnable-header 
vector-appearance-pinnable-header vector-pinnable-header-pinned" data-feature-name="appearance-pinned" data-pinnable-element-id="vector-appearance" data-pinned-container-id="vector-appearance-pinned-container" data-unpinned-container-id="vector-appearance-unpinned-container" > <div class="vector-pinnable-header-label">Appearance</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-appearance.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-appearance.unpin">hide</button> </div> </div> </div> </nav> </div> </div> <div id="bodyContent" class="vector-body" aria-labelledby="firstHeading" data-mw-ve-target-container> <div class="vector-body-before-content"> <div class="mw-indicators"> </div> <div id="siteSub" class="noprint">From Wikipedia, the free encyclopedia</div> </div> <div id="contentSub"><div id="mw-content-subtitle"></div></div> <div id="mw-content-text" class="mw-body-content"><div class="mw-content-ltr mw-parser-output" lang="en" dir="ltr"><div class="shortdescription nomobile noexcerpt noprint searchaux" style="display:none">Process of visually interpreting emotions</div> <p class="mw-empty-elt"> </p> <style data-mw-deduplicate="TemplateStyles:r1129693374">.mw-parser-output .hlist dl,.mw-parser-output .hlist ol,.mw-parser-output .hlist ul{margin:0;padding:0}.mw-parser-output .hlist dd,.mw-parser-output .hlist dt,.mw-parser-output .hlist li{margin:0;display:inline}.mw-parser-output .hlist.inline,.mw-parser-output .hlist.inline dl,.mw-parser-output .hlist.inline ol,.mw-parser-output .hlist.inline ul,.mw-parser-output .hlist dl dl,.mw-parser-output .hlist dl ol,.mw-parser-output .hlist dl ul,.mw-parser-output .hlist ol dl,.mw-parser-output .hlist ol ol,.mw-parser-output .hlist ol ul,.mw-parser-output .hlist ul dl,.mw-parser-output .hlist ul ol,.mw-parser-output .hlist ul 
ul{display:inline}.mw-parser-output .hlist .mw-empty-li{display:none}.mw-parser-output .hlist dt::after{content:": "}.mw-parser-output .hlist dd::after,.mw-parser-output .hlist li::after{content:" · ";font-weight:bold}.mw-parser-output .hlist dd:last-child::after,.mw-parser-output .hlist dt:last-child::after,.mw-parser-output .hlist li:last-child::after{content:none}.mw-parser-output .hlist dd dd:first-child::before,.mw-parser-output .hlist dd dt:first-child::before,.mw-parser-output .hlist dd li:first-child::before,.mw-parser-output .hlist dt dd:first-child::before,.mw-parser-output .hlist dt dt:first-child::before,.mw-parser-output .hlist dt li:first-child::before,.mw-parser-output .hlist li dd:first-child::before,.mw-parser-output .hlist li dt:first-child::before,.mw-parser-output .hlist li li:first-child::before{content:" (";font-weight:normal}.mw-parser-output .hlist dd dd:last-child::after,.mw-parser-output .hlist dd dt:last-child::after,.mw-parser-output .hlist dd li:last-child::after,.mw-parser-output .hlist dt dd:last-child::after,.mw-parser-output .hlist dt dt:last-child::after,.mw-parser-output .hlist dt li:last-child::after,.mw-parser-output .hlist li dd:last-child::after,.mw-parser-output .hlist li dt:last-child::after,.mw-parser-output .hlist li li:last-child::after{content:")";font-weight:normal}.mw-parser-output .hlist ol{counter-reset:listitem}.mw-parser-output .hlist ol>li{counter-increment:listitem}.mw-parser-output .hlist ol>li::before{content:" "counter(listitem)"\a0 "}.mw-parser-output .hlist dd ol>li:first-child::before,.mw-parser-output .hlist dt ol>li:first-child::before,.mw-parser-output .hlist li ol>li:first-child::before{content:" ("counter(listitem)"\a0 "}</style><style data-mw-deduplicate="TemplateStyles:r1246091330">.mw-parser-output .sidebar{width:22em;float:right;clear:right;margin:0.5em 0 1em 1em;background:var(--background-color-neutral-subtle,#f8f9fa);border:1px solid 
var(--border-color-base,#a2a9b1);padding:0.2em;text-align:center;line-height:1.4em;font-size:88%;border-collapse:collapse;display:table}body.skin-minerva .mw-parser-output .sidebar{display:table!important;float:right!important;margin:0.5em 0 1em 1em!important}.mw-parser-output .sidebar-subgroup{width:100%;margin:0;border-spacing:0}.mw-parser-output .sidebar-left{float:left;clear:left;margin:0.5em 1em 1em 0}.mw-parser-output .sidebar-none{float:none;clear:both;margin:0.5em 1em 1em 0}.mw-parser-output .sidebar-outer-title{padding:0 0.4em 0.2em;font-size:125%;line-height:1.2em;font-weight:bold}.mw-parser-output .sidebar-top-image{padding:0.4em}.mw-parser-output .sidebar-top-caption,.mw-parser-output .sidebar-pretitle-with-top-image,.mw-parser-output .sidebar-caption{padding:0.2em 0.4em 0;line-height:1.2em}.mw-parser-output .sidebar-pretitle{padding:0.4em 0.4em 0;line-height:1.2em}.mw-parser-output .sidebar-title,.mw-parser-output .sidebar-title-with-pretitle{padding:0.2em 0.8em;font-size:145%;line-height:1.2em}.mw-parser-output .sidebar-title-with-pretitle{padding:0.1em 0.4em}.mw-parser-output .sidebar-image{padding:0.2em 0.4em 0.4em}.mw-parser-output .sidebar-heading{padding:0.1em 0.4em}.mw-parser-output .sidebar-content{padding:0 0.5em 0.4em}.mw-parser-output .sidebar-content-with-subgroup{padding:0.1em 0.4em 0.2em}.mw-parser-output .sidebar-above,.mw-parser-output .sidebar-below{padding:0.3em 0.8em;font-weight:bold}.mw-parser-output .sidebar-collapse .sidebar-above,.mw-parser-output .sidebar-collapse .sidebar-below{border-top:1px solid #aaa;border-bottom:1px solid #aaa}.mw-parser-output .sidebar-navbar{text-align:right;font-size:115%;padding:0 0.4em 0.4em}.mw-parser-output .sidebar-list-title{padding:0 0.4em;text-align:left;font-weight:bold;line-height:1.6em;font-size:105%}.mw-parser-output .sidebar-list-title-c{padding:0 0.4em;text-align:center;margin:0 3.3em}@media(max-width:640px){body.mediawiki .mw-parser-output 
.sidebar{width:100%!important;clear:both;float:none!important;margin-left:0!important;margin-right:0!important}}body.skin--responsive .mw-parser-output .sidebar a>img{max-width:none!important}@media screen{html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-list-title,html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle{background:transparent!important}html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle a{color:var(--color-progressive)!important}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-list-title,html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle{background:transparent!important}html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle a{color:var(--color-progressive)!important}}@media print{body.ns-0 .mw-parser-output .sidebar{display:none!important}}</style><table class="sidebar sidebar-collapse nomobile nowraplinks hlist"><tbody><tr><td class="sidebar-pretitle">Part of a series on</td></tr><tr><th class="sidebar-title-with-pretitle"><a href="/wiki/Artificial_intelligence" title="Artificial intelligence">Artificial intelligence (AI)</a></th></tr><tr><td class="sidebar-image"><figure class="mw-halign-center" typeof="mw:File"><a href="/wiki/File:Dall-e_3_(jan_%2724)_artificial_intelligence_icon.png" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/120px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png" decoding="async" width="100" height="100" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/250px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png 1.5x" 
data-file-width="820" data-file-height="820" /></a><figcaption></figcaption></figure></td></tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Artificial_intelligence#Goals" title="Artificial intelligence">Major goals</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Artificial_general_intelligence" title="Artificial general intelligence">Artificial general intelligence</a></li> <li><a href="/wiki/Intelligent_agent" title="Intelligent agent">Intelligent agent</a></li> <li><a href="/wiki/Recursive_self-improvement" title="Recursive self-improvement">Recursive self-improvement</a></li> <li><a href="/wiki/Automated_planning_and_scheduling" title="Automated planning and scheduling">Planning</a></li> <li><a href="/wiki/Computer_vision" title="Computer vision">Computer vision</a></li> <li><a href="/wiki/General_game_playing" title="General game playing">General game playing</a></li> <li><a href="/wiki/Knowledge_representation_and_reasoning" title="Knowledge representation and reasoning">Knowledge reasoning</a></li> <li><a href="/wiki/Natural_language_processing" title="Natural language processing">Natural language processing</a></li> <li><a href="/wiki/Robotics" title="Robotics">Robotics</a></li> <li><a href="/wiki/AI_safety" title="AI safety">AI safety</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)">Approaches</div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Machine_learning" title="Machine learning">Machine learning</a></li> <li><a href="/wiki/Symbolic_artificial_intelligence" title="Symbolic artificial intelligence">Symbolic</a></li> <li><a href="/wiki/Deep_learning" title="Deep learning">Deep learning</a></li> 
<li><a href="/wiki/Bayesian_network" title="Bayesian network">Bayesian networks</a></li> <li><a href="/wiki/Evolutionary_algorithm" title="Evolutionary algorithm">Evolutionary algorithms</a></li> <li><a href="/wiki/Hybrid_intelligent_system" title="Hybrid intelligent system">Hybrid intelligent systems</a></li> <li><a href="/wiki/Artificial_intelligence_systems_integration" title="Artificial intelligence systems integration">Systems integration</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Applications_of_artificial_intelligence" title="Applications of artificial intelligence">Applications</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Machine_learning_in_bioinformatics" title="Machine learning in bioinformatics">Bioinformatics</a></li> <li><a href="/wiki/Deepfake" title="Deepfake">Deepfake</a></li> <li><a href="/wiki/Machine_learning_in_earth_sciences" title="Machine learning in earth sciences">Earth sciences</a></li> <li><a href="/wiki/Applications_of_artificial_intelligence#Finance" title="Applications of artificial intelligence"> Finance </a></li> <li><a href="/wiki/Generative_artificial_intelligence" title="Generative artificial intelligence">Generative AI</a> <ul><li><a href="/wiki/Artificial_intelligence_art" title="Artificial intelligence art">Art</a></li> <li><a href="/wiki/Generative_audio" title="Generative audio">Audio</a></li> <li><a href="/wiki/Music_and_artificial_intelligence" title="Music and artificial intelligence">Music</a></li></ul></li> <li><a href="/wiki/Artificial_intelligence_in_government" title="Artificial intelligence in government">Government</a></li> <li><a href="/wiki/Artificial_intelligence_in_healthcare" title="Artificial intelligence in healthcare">Healthcare</a> <ul><li><a 
href="/wiki/Artificial_intelligence_in_mental_health" title="Artificial intelligence in mental health">Mental health</a></li></ul></li> <li><a href="/wiki/Artificial_intelligence_in_industry" title="Artificial intelligence in industry">Industry</a></li> <li><a href="/wiki/Machine_translation" title="Machine translation">Translation</a></li> <li><a href="/wiki/Artificial_intelligence_arms_race" title="Artificial intelligence arms race"> Military </a></li> <li><a href="/wiki/Machine_learning_in_physics" title="Machine learning in physics">Physics</a></li> <li><a href="/wiki/List_of_artificial_intelligence_projects" title="List of artificial intelligence projects">Projects</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Philosophy_of_artificial_intelligence" title="Philosophy of artificial intelligence">Philosophy</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Artificial_consciousness" title="Artificial consciousness">Artificial consciousness</a></li> <li><a href="/wiki/Chinese_room" title="Chinese room">Chinese room</a></li> <li><a href="/wiki/Friendly_artificial_intelligence" title="Friendly artificial intelligence">Friendly AI</a></li> <li><a href="/wiki/AI_control_problem" class="mw-redirect" title="AI control problem">Control problem</a>/<a href="/wiki/AI_takeover" title="AI takeover">Takeover</a></li> <li><a href="/wiki/Ethics_of_artificial_intelligence" title="Ethics of artificial intelligence">Ethics</a></li> <li><a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk</a></li> <li><a href="/wiki/Turing_test" title="Turing test">Turing test</a></li> <li><a href="/wiki/Uncanny_valley" title="Uncanny valley">Uncanny 
valley</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/History_of_artificial_intelligence" title="History of artificial intelligence">History</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Timeline_of_artificial_intelligence" title="Timeline of artificial intelligence">Timeline</a></li> <li><a href="/wiki/Progress_in_artificial_intelligence" title="Progress in artificial intelligence">Progress</a></li> <li><a href="/wiki/AI_winter" title="AI winter">AI winter</a></li> <li><a href="/wiki/AI_boom" title="AI boom">AI boom</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)">Glossary</div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Glossary_of_artificial_intelligence" title="Glossary of artificial intelligence">Glossary</a></li></ul></div></div></td> </tr><tr><td class="sidebar-navbar"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><style data-mw-deduplicate="TemplateStyles:r1239400231">.mw-parser-output .navbar{display:inline;font-size:88%;font-weight:normal}.mw-parser-output .navbar-collapse{float:left;text-align:left}.mw-parser-output .navbar-boxtext{word-spacing:0}.mw-parser-output .navbar ul{display:inline-block;white-space:nowrap;line-height:inherit}.mw-parser-output .navbar-brackets::before{margin-right:-0.125em;content:"[ "}.mw-parser-output .navbar-brackets::after{margin-left:-0.125em;content:" ]"}.mw-parser-output .navbar li{word-spacing:-0.125em}.mw-parser-output .navbar a>span,.mw-parser-output .navbar a>abbr{text-decoration:inherit}.mw-parser-output .navbar-mini 
abbr{font-variant:small-caps;border-bottom:none;text-decoration:none;cursor:inherit}.mw-parser-output .navbar-ct-full{font-size:114%;margin:0 7em}.mw-parser-output .navbar-ct-mini{font-size:114%;margin:0 4em}html.skin-theme-clientpref-night .mw-parser-output .navbar li a abbr{color:var(--color-base)!important}@media(prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .navbar li a abbr{color:var(--color-base)!important}}@media print{.mw-parser-output .navbar{display:none!important}}</style><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Artificial_intelligence" title="Template:Artificial intelligence"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Artificial_intelligence" title="Template talk:Artificial intelligence"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Artificial_intelligence" title="Special:EditPage/Template:Artificial intelligence"><abbr title="Edit this template">e</abbr></a></li></ul></div></td></tr></tbody></table> <p><b>Emotion recognition</b> is the process of identifying human <a href="/wiki/Emotion" title="Emotion">emotion</a>. People vary widely in their accuracy at recognizing the emotions of others. Use of technology to help people with emotion recognition is a relatively nascent research area. Generally, the technology works best if it uses multiple <a href="/wiki/Modality_(human%E2%80%93computer_interaction)" title="Modality (human–computer interaction)">modalities</a> in context. To date, the most work has been conducted on automating the <a href="/wiki/Facial_expression_recognition" class="mw-redirect" title="Facial expression recognition">recognition of facial expressions</a> from video, spoken expressions from audio, written expressions from text, and <a href="/wiki/Physiology" title="Physiology">physiology</a> as measured by wearables. 
</p> <meta property="mw:PageProp/toc" /> <div class="mw-heading mw-heading2"><h2 id="Human">Human</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=1" title="Edit section: Human"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1236090951">.mw-parser-output .hatnote{font-style:italic}.mw-parser-output div.hatnote{padding-left:1.6em;margin-bottom:0.5em}.mw-parser-output .hatnote i{font-style:normal}.mw-parser-output .hatnote+link+.hatnote{margin-top:-0.5em}@media print{body.ns-0 .mw-parser-output .hatnote{display:none!important}}</style><div role="note" class="hatnote navigation-not-searchable">Main article: <a href="/wiki/Emotion_perception" title="Emotion perception">Emotion perception</a></div> <p>Humans show a great deal of variability in their abilities to recognize emotion. A key point to keep in mind when learning about automated emotion recognition is that there are several sources of "ground truth", or truth about what the real emotion is. Suppose we are trying to recognize the emotions of Alex. One source is "what would most people say that Alex is feeling?" In this case, the 'truth' may not correspond to what Alex feels, but may correspond to what most people would say it looks like Alex feels. For example, Alex may actually feel sad, but he puts on a big smile and then most people say he looks happy. If an automated method achieves the same results as a group of observers it may be considered accurate, even if it does not actually measure what Alex truly feels. Another source of 'truth' is to ask Alex what he truly feels. This works if Alex has a good sense of his internal state, and wants to tell you what it is, and is capable of putting it accurately into words or a number. 
However, some people are <a href="/wiki/Alexithymia" title="Alexithymia">alexithymic</a> and do not have a good sense of their internal feelings, or they are not able to communicate them accurately with words and numbers. In general, getting to the truth of what emotion is actually present can take some work, can vary depending on the criteria that are selected, and will usually involve maintaining some level of uncertainty. </p> <div class="mw-heading mw-heading2"><h2 id="Automatic">Automatic</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=2" title="Edit section: Automatic"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Decades of scientific research have been conducted developing and evaluating methods for automated emotion recognition. There is now an extensive literature proposing and evaluating hundreds of different kinds of methods, leveraging techniques from multiple areas, such as <a href="/wiki/Signal_processing" title="Signal processing">signal processing</a>, <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a>, <a href="/wiki/Computer_vision" title="Computer vision">computer vision</a>, and <a href="/wiki/Speech_processing" title="Speech processing">speech processing</a>. 
Different methodologies and techniques may be employed to interpret emotion such as <a href="/wiki/Bayesian_network" title="Bayesian network">Bayesian networks</a>,<sup id="cite_ref-1" class="reference"><a href="#cite_note-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup> Gaussian <a href="/wiki/Mixture_model" title="Mixture model">Mixture models</a>,<sup id="cite_ref-2" class="reference"><a href="#cite_note-2"><span class="cite-bracket">&#91;</span>2<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Hidden_Markov_model" title="Hidden Markov model">Hidden Markov Models</a><sup id="cite_ref-3" class="reference"><a href="#cite_note-3"><span class="cite-bracket">&#91;</span>3<span class="cite-bracket">&#93;</span></a></sup> and <a href="/wiki/Deep_neural_networks" class="mw-redirect" title="Deep neural networks">deep neural networks</a>.<sup id="cite_ref-4" class="reference"><a href="#cite_note-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Approaches">Approaches</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=3" title="Edit section: Approaches"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The accuracy of emotion recognition is usually improved when it combines the analysis of human expressions from multimodal forms such as texts, physiology, audio, or video.<sup id="cite_ref-5" class="reference"><a href="#cite_note-5"><span class="cite-bracket">&#91;</span>5<span class="cite-bracket">&#93;</span></a></sup> Different <a href="/wiki/Emotion" title="Emotion">emotion</a> types are detected through the integration of information from <a href="/wiki/Facial_expressions" class="mw-redirect" title="Facial expressions">facial expressions</a>, body movement and <a 
href="/wiki/Gesture_recognition" title="Gesture recognition">gestures</a>, and speech.<sup id="cite_ref-6" class="reference"><a href="#cite_note-6"><span class="cite-bracket">&#91;</span>6<span class="cite-bracket">&#93;</span></a></sup> The technology is said to contribute to the emergence of the so-called emotional or <a href="/wiki/Emotive_Internet" title="Emotive Internet">emotive Internet</a>.<sup id="cite_ref-7" class="reference"><a href="#cite_note-7"><span class="cite-bracket">&#91;</span>7<span class="cite-bracket">&#93;</span></a></sup> </p><p>The existing approaches in emotion recognition to classify certain <a href="/wiki/Emotion" title="Emotion">emotion</a> types can be generally classified into three main categories: knowledge-based techniques, statistical methods, and hybrid approaches.<sup id="cite_ref-s1_8-0" class="reference"><a href="#cite_note-s1-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Knowledge-based_techniques">Knowledge-based techniques</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=4" title="Edit section: Knowledge-based techniques"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Knowledge-based techniques (sometimes referred to as <a href="/wiki/Lexicon" title="Lexicon">lexicon</a>-based techniques) utilize domain knowledge and the <a href="/wiki/Semantic" class="mw-redirect" title="Semantic">semantic</a> and <a href="/wiki/Syntactic" class="mw-redirect" title="Syntactic">syntactic</a> characteristics of text and potentially spoken language in order to detect certain <a href="/wiki/Emotion" title="Emotion">emotion</a> types.<sup id="cite_ref-9" class="reference"><a href="#cite_note-9"><span class="cite-bracket">&#91;</span>9<span class="cite-bracket">&#93;</span></a></sup> In this approach, it is 
common to use knowledge-based resources during the <a href="/wiki/Emotion_classification" title="Emotion classification">emotion classification</a> process such as <a href="/wiki/WordNet" title="WordNet">WordNet</a>, SenticNet,<sup id="cite_ref-SenticNet_10-0" class="reference"><a href="#cite_note-SenticNet-10"><span class="cite-bracket">&#91;</span>10<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/ConceptNet" class="mw-redirect" title="ConceptNet">ConceptNet</a>, and EmotiNet,<sup id="cite_ref-11" class="reference"><a href="#cite_note-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> to name a few.<sup id="cite_ref-s6_12-0" class="reference"><a href="#cite_note-s6-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup> One of the advantages of this approach is the accessibility and economy brought about by the large availability of such knowledge-based resources.<sup id="cite_ref-s1_8-1" class="reference"><a href="#cite_note-s1-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup> A limitation of this technique on the other hand, is its inability to handle concept nuances and complex linguistic rules.<sup id="cite_ref-s1_8-2" class="reference"><a href="#cite_note-s1-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup> </p><p>Knowledge-based techniques can be mainly classified into two categories: dictionary-based and corpus-based approaches.<sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. 
(September 2019)">citation needed</span></a></i>&#93;</sup> Dictionary-based approaches find opinion or <a href="/wiki/Emotion" title="Emotion">emotion</a> seed words in a <a href="/wiki/Dictionary" title="Dictionary">dictionary</a> and search for their <a href="/wiki/Synonym" title="Synonym">synonyms</a> and <a href="/wiki/Antonym" class="mw-redirect" title="Antonym">antonyms</a> to expand the initial list of opinions or <a href="/wiki/Emotion" title="Emotion">emotions</a>.<sup id="cite_ref-s3_13-0" class="reference"><a href="#cite_note-s3-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> Corpus-based approaches on the other hand, start with a seed list of opinion or <a href="/wiki/Emotion" title="Emotion">emotion</a> words, and expand the database by finding other words with context-specific characteristics in a large <a href="/wiki/Corpus_linguistics" title="Corpus linguistics">corpus</a>.<sup id="cite_ref-s3_13-1" class="reference"><a href="#cite_note-s3-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> While corpus-based approaches take into account context, their performance still vary in different domains since a word in one domain can have a different orientation in another domain.<sup id="cite_ref-14" class="reference"><a href="#cite_note-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Statistical_methods">Statistical methods</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=5" title="Edit section: Statistical methods"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Statistical methods commonly involve the use of different supervised <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a> algorithms in which a 
large set of annotated data is fed into the algorithms for the system to learn and predict the appropriate <a href="/wiki/Emotion" title="Emotion">emotion</a> types.<sup id="cite_ref-s1_8-3" class="reference"><a href="#cite_note-s1-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Machine_learning" title="Machine learning">Machine learning</a> algorithms generally provide more reasonable classification accuracy compared to other approaches, but one of the challenges in achieving good results in the classification process, is the need to have a sufficiently large training set.<sup id="cite_ref-s1_8-4" class="reference"><a href="#cite_note-s1-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup> </p><p>Some of the most commonly used <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a> algorithms include <a href="/wiki/Support_vector_machines" class="mw-redirect" title="Support vector machines">Support Vector Machines (SVM)</a>, <a href="/wiki/Naive_bayes_classifier" class="mw-redirect" title="Naive bayes classifier">Naive Bayes</a>, and <a href="/wiki/Maximum_entropy_classifier" class="mw-redirect" title="Maximum entropy classifier">Maximum Entropy</a>.<sup id="cite_ref-s4_15-0" class="reference"><a href="#cite_note-s4-15"><span class="cite-bracket">&#91;</span>15<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Deep_learning" title="Deep learning">Deep learning</a>, which is under the unsupervised family of <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a>, is also widely employed in emotion recognition.<sup id="cite_ref-16" class="reference"><a href="#cite_note-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-17" class="reference"><a href="#cite_note-17"><span class="cite-bracket">&#91;</span>17<span 
class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-18" class="reference"><a href="#cite_note-18"><span class="cite-bracket">&#91;</span>18<span class="cite-bracket">&#93;</span></a></sup> Well-known <a href="/wiki/Deep_learning" title="Deep learning">deep learning</a> algorithms include different architectures of <a href="/wiki/Artificial_neural_network" class="mw-redirect" title="Artificial neural network">Artificial Neural Network (ANN)</a> such as <a href="/wiki/Convolutional_neural_network" title="Convolutional neural network">Convolutional Neural Network (CNN)</a>, <a href="/wiki/Long_short-term_memory" title="Long short-term memory">Long Short-term Memory (LSTM)</a>, and <a href="/wiki/Extreme_learning_machine" title="Extreme learning machine">Extreme Learning Machine (ELM)</a>.<sup id="cite_ref-s4_15-1" class="reference"><a href="#cite_note-s4-15"><span class="cite-bracket">&#91;</span>15<span class="cite-bracket">&#93;</span></a></sup> The popularity of <a href="/wiki/Deep_learning" title="Deep learning">deep learning</a> approaches in the domain of emotion recognition may be mainly attributed to its success in related applications such as in <a href="/wiki/Computer_vision" title="Computer vision">computer vision</a>, <a href="/wiki/Speech_recognition" title="Speech recognition">speech recognition</a>, and <a href="/wiki/Natural_language_processing" title="Natural language processing">Natural Language Processing (NLP)</a>.<sup id="cite_ref-s4_15-2" class="reference"><a href="#cite_note-s4-15"><span class="cite-bracket">&#91;</span>15<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Hybrid_approaches">Hybrid approaches</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=6" title="Edit section: Hybrid approaches"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Hybrid 
approaches in emotion recognition are essentially a combination of knowledge-based techniques and statistical methods, which exploit complementary characteristics from both techniques.<sup id="cite_ref-s1_8-5" class="reference"><a href="#cite_note-s1-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup> Some of the works that have applied an ensemble of knowledge-driven linguistic elements and statistical methods include sentic computing and iFeel, both of which have adopted the concept-level knowledge-based resource SenticNet.<sup id="cite_ref-19" class="reference"><a href="#cite_note-19"><span class="cite-bracket">&#91;</span>19<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-20" class="reference"><a href="#cite_note-20"><span class="cite-bracket">&#91;</span>20<span class="cite-bracket">&#93;</span></a></sup> The role of such knowledge-based resources in the implementation of hybrid approaches is highly important in the <a href="/wiki/Emotion" title="Emotion">emotion</a> classification process.<sup id="cite_ref-s6_12-1" class="reference"><a href="#cite_note-s6-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup> Since hybrid techniques gain from the benefits offered by both knowledge-based and statistical approaches, they tend to have better classification performance as opposed to employing knowledge-based or statistical methods independently.<sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. 
(September 2019)">citation needed</span></a></i>&#93;</sup> A downside of using hybrid techniques however, is the computational complexity during the classification process.<sup id="cite_ref-s6_12-2" class="reference"><a href="#cite_note-s6-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Datasets">Datasets</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=7" title="Edit section: Datasets"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Data is an integral part of the existing approaches in emotion recognition and in most cases it is a challenge to obtain annotated data that is necessary to train <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a> algorithms.<sup id="cite_ref-s3_13-2" class="reference"><a href="#cite_note-s3-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> For the task of classifying different <a href="/wiki/Emotion" title="Emotion">emotion</a> types from multimodal sources in the form of texts, audio, videos or physiological signals, the following datasets are available: </p> <ol><li>HUMAINE: provides natural clips with emotion words and context labels in multiple modalities<sup id="cite_ref-21" class="reference"><a href="#cite_note-21"><span class="cite-bracket">&#91;</span>21<span class="cite-bracket">&#93;</span></a></sup></li> <li>Belfast database: provides clips with a wide range of emotions from TV programs and interview recordings<sup id="cite_ref-22" class="reference"><a href="#cite_note-22"><span class="cite-bracket">&#91;</span>22<span class="cite-bracket">&#93;</span></a></sup></li> <li>SEMAINE: provides audiovisual recordings between a person and a <a href="/wiki/Intelligent_agent" title="Intelligent agent">virtual agent</a> and 
contains emotion annotations such as angry, happy, fear, disgust, sadness, contempt, and amusement<sup id="cite_ref-23" class="reference"><a href="#cite_note-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup></li> <li>IEMOCAP: provides recordings of dyadic sessions between actors and contains emotion annotations such as happiness, anger, sadness, frustration, and neutral state<sup id="cite_ref-24" class="reference"><a href="#cite_note-24"><span class="cite-bracket">&#91;</span>24<span class="cite-bracket">&#93;</span></a></sup></li> <li>eNTERFACE: provides audiovisual recordings of subjects from seven nationalities and contains emotion annotations such as happiness, anger, sadness, surprise, disgust, and fear<sup id="cite_ref-25" class="reference"><a href="#cite_note-25"><span class="cite-bracket">&#91;</span>25<span class="cite-bracket">&#93;</span></a></sup></li> <li>DEAP: provides <a href="/wiki/Electroencephalography" title="Electroencephalography">electroencephalography</a> (EEG), <a href="/wiki/Electrocardiography" title="Electrocardiography">electrocardiography</a> (ECG), and face video recordings, as well as emotion annotations in terms of <a href="/wiki/Valence_(psychology)" title="Valence (psychology)">valence</a>, <a href="/wiki/Arousal" title="Arousal">arousal</a>, and <a href="/w/index.php?title=Dominance_(psychology)&amp;action=edit&amp;redlink=1" class="new" title="Dominance (psychology) (page does not exist)">dominance</a> of people watching film clips<sup id="cite_ref-26" class="reference"><a href="#cite_note-26"><span class="cite-bracket">&#91;</span>26<span class="cite-bracket">&#93;</span></a></sup></li> <li>DREAMER: provides electroencephalography (EEG) and electrocardiography (ECG) recordings, as well as emotion annotations in terms of valence, dominance of people watching film clips<sup id="cite_ref-27" class="reference"><a href="#cite_note-27"><span class="cite-bracket">&#91;</span>27<span 
class="cite-bracket">&#93;</span></a></sup></li> <li>MELD: is a multiparty conversational dataset where each utterance is labeled with emotion and sentiment. MELD<sup id="cite_ref-28" class="reference"><a href="#cite_note-28"><span class="cite-bracket">&#91;</span>28<span class="cite-bracket">&#93;</span></a></sup> provides conversations in video format and hence suitable for <a href="/wiki/Multimodal_interaction" title="Multimodal interaction">multimodal</a> emotion recognition and <a href="/wiki/Sentiment_analysis" title="Sentiment analysis">sentiment analysis</a>. MELD is useful for <a href="/wiki/Multimodal_sentiment_analysis" title="Multimodal sentiment analysis">multimodal sentiment analysis</a> and emotion recognition, <a href="/wiki/Dialogue_system" title="Dialogue system">dialogue systems</a> and <a href="/wiki/Emotion_recognition_in_conversation" title="Emotion recognition in conversation">emotion recognition in conversations</a>.<sup id="cite_ref-:0_29-0" class="reference"><a href="#cite_note-:0-29"><span class="cite-bracket">&#91;</span>29<span class="cite-bracket">&#93;</span></a></sup></li> <li>MuSe: provides audiovisual recordings of natural interactions between a person and an object.<sup id="cite_ref-30" class="reference"><a href="#cite_note-30"><span class="cite-bracket">&#91;</span>30<span class="cite-bracket">&#93;</span></a></sup> It has discrete and continuous emotion annotations in terms of valence, arousal and trustworthiness as well as speech topics useful for multimodal sentiment analysis and emotion recognition.</li> <li>UIT-VSMEC: is a standard Vietnamese Social Media Emotion Corpus (UIT-VSMEC) with about 6,927 human-annotated sentences with six emotion labels, contributing to emotion recognition research in Vietnamese which is a low-resource language in Natural Language Processing (NLP).<sup id="cite_ref-31" class="reference"><a href="#cite_note-31"><span class="cite-bracket">&#91;</span>31<span 
class="cite-bracket">&#93;</span></a></sup></li> <li>BED: provides valence and arousal of people watching images. It also includes electroencephalography (EEG) recordings of people exposed to various stimuli (<a href="/wiki/SSVEP" class="mw-redirect" title="SSVEP">SSVEP</a>, resting with eyes closed, resting with eyes open, cognitive tasks) for the task of EEG-based <a href="/wiki/Biometrics" title="Biometrics">biometrics</a>.<sup id="cite_ref-32" class="reference"><a href="#cite_note-32"><span class="cite-bracket">&#91;</span>32<span class="cite-bracket">&#93;</span></a></sup></li></ol> <div class="mw-heading mw-heading3"><h3 id="Applications">Applications</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=8" title="Edit section: Applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Emotion recognition is used in society for a variety of reasons. <a href="/wiki/Affectiva" title="Affectiva">Affectiva</a>, which spun out of <a href="/wiki/MIT" class="mw-redirect" title="MIT">MIT</a>, provides <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> software that makes it more efficient to do tasks previously done manually by people, mainly to gather facial expression and vocal expression information related to specific contexts where viewers have consented to share this information. For example, instead of filling out a lengthy survey about how you feel at each point watching an educational video or advertisement, you can consent to have a camera watch your face and listen to what you say, and note during which parts of the experience you show expressions such as boredom, interest, confusion, or smiling. (Note that this does not imply it is reading your innermost feelings—it only reads what you express outwardly.) 
Other uses by <a href="/wiki/Affectiva" title="Affectiva">Affectiva</a> include helping children with autism, helping people who are blind to read facial expressions, helping robots interact more intelligently with people, and monitoring signs of attention while driving in an effort to enhance driver safety.<sup id="cite_ref-33" class="reference"><a href="#cite_note-33"><span class="cite-bracket">&#91;</span>33<span class="cite-bracket">&#93;</span></a></sup> </p><p>Academic research increasingly uses emotion recognition as a method to study social science questions around elections, protests, and democracy. Several studies focus on the facial expressions of political candidates on <a href="/wiki/Social_media" title="Social media">social media</a> and find that politicians tend to express happiness.<sup id="cite_ref-34" class="reference"><a href="#cite_note-34"><span class="cite-bracket">&#91;</span>34<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-35" class="reference"><a href="#cite_note-35"><span class="cite-bracket">&#91;</span>35<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-36" class="reference"><a href="#cite_note-36"><span class="cite-bracket">&#91;</span>36<span class="cite-bracket">&#93;</span></a></sup> However, this research finds that computer vision tools such as <a href="/wiki/Amazon_Rekognition" title="Amazon Rekognition">Amazon Rekognition</a> are only accurate for happiness and are mostly reliable as 'happy detectors'.<sup id="cite_ref-37" class="reference"><a href="#cite_note-37"><span class="cite-bracket">&#91;</span>37<span class="cite-bracket">&#93;</span></a></sup> Researchers examining protests, where negative <a href="/wiki/Affect_(psychology)" title="Affect (psychology)">affect</a> such as anger is expected, have therefore developed their own models to more accurately study expressions of negativity and violence in democratic processes.<sup id="cite_ref-38" class="reference"><a 
href="#cite_note-38"><span class="cite-bracket">&#91;</span>38<span class="cite-bracket">&#93;</span></a></sup> </p><p>A <a rel="nofollow" class="external text" href="https://pdfpiw.uspto.gov/.piw?PageNum=0&amp;docid=10061977&amp;IDKey=20D25A962A60&amp;HomeUrl=http%3A%2F%2Fpatft.uspto.gov%2Fnetacgi%2Fnph-Parser%3FSect2%3DPTO1%2526Sect2%3DHITOFF%2526p%3D1%2526u%3D%2Fnetahtml%2FPTO%2Fsearch-bool.html%2526r%3D1%2526f%3DG%2526l%3D50%2526d%3DPALL%2526S1%3D10061977.PN.%2526OS%3DPN%2F10061977%2526RS%3DPN%2F10061977">patent</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191007210411/https://pdfpiw.uspto.gov/.piw?PageNum=0&amp;docid=10061977&amp;IDKey=20D25A962A60&amp;HomeUrl=http%3A%2F%2Fpatft.uspto.gov%2Fnetacgi%2Fnph-Parser%3FSect2%3DPTO1%2526Sect2%3DHITOFF%2526p%3D1%2526u%3D%2Fnetahtml%2FPTO%2Fsearch-bool.html%2526r%3D1%2526f%3DG%2526l%3D50%2526d%3DPALL%2526S1%3D10061977.PN.%2526OS%3DPN%2F10061977%2526RS%3DPN%2F10061977">Archived</a> 7 October 2019 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a> filed by <a href="/wiki/Snapchat" title="Snapchat">Snapchat</a> in 2015 describes a method of extracting data about crowds at public events by performing algorithmic emotion recognition on users' geotagged <a href="/wiki/Selfie" title="Selfie">selfies</a>.<sup id="cite_ref-39" class="reference"><a href="#cite_note-39"><span class="cite-bracket">&#91;</span>39<span class="cite-bracket">&#93;</span></a></sup> </p><p>Emotient was a <a href="/wiki/Startup_company" title="Startup company">startup company</a> which applied emotion recognition to reading frowns, smiles, and other expressions on faces, namely <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> to predict "attitudes and actions based on facial expressions".<sup id="cite_ref-DeMuth_40-0" class="reference"><a href="#cite_note-DeMuth-40"><span class="cite-bracket">&#91;</span>40<span 
class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Apple_Inc." title="Apple Inc.">Apple</a> bought Emotient in 2016 and uses emotion recognition technology to enhance the emotional intelligence of its products.<sup id="cite_ref-DeMuth_40-1" class="reference"><a href="#cite_note-DeMuth-40"><span class="cite-bracket">&#91;</span>40<span class="cite-bracket">&#93;</span></a></sup> </p><p>nViso provides real-time emotion recognition for web and mobile applications through a real-time <a href="/wiki/API" title="API">API</a>.<sup id="cite_ref-41" class="reference"><a href="#cite_note-41"><span class="cite-bracket">&#91;</span>41<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Visage_Technologies_AB" title="Visage Technologies AB">Visage Technologies AB</a> offers emotion estimation as a part of their <a href="/wiki/Visage_SDK" title="Visage SDK">Visage SDK</a> for <a href="/wiki/Marketing" title="Marketing">marketing</a> and scientific research and similar purposes.<sup id="cite_ref-42" class="reference"><a href="#cite_note-42"><span class="cite-bracket">&#91;</span>42<span class="cite-bracket">&#93;</span></a></sup> </p><p><a href="/wiki/Eyeris" title="Eyeris">Eyeris</a> is an emotion recognition company that works with <a href="/wiki/Embedded_system" title="Embedded system">embedded system</a> manufacturers including car makers and social robotic companies on integrating its face analytics and emotion recognition software; as well as with video content creators to help them measure the perceived effectiveness of their short and long form video creative.<sup id="cite_ref-43" class="reference"><a href="#cite_note-43"><span class="cite-bracket">&#91;</span>43<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-44" class="reference"><a href="#cite_note-44"><span class="cite-bracket">&#91;</span>44<span class="cite-bracket">&#93;</span></a></sup> </p><p>Many products also exist to aggregate information from emotions communicated online, 
including via "like" button presses and via counts of positive and negative phrases in text and affect recognition is increasingly used in some kinds of games and virtual reality, both for educational purposes and to give players more natural control over their social avatars.<sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. (February 2020)">citation needed</span></a></i>&#93;</sup> </p> <div class="mw-heading mw-heading2"><h2 id="Subfields">Subfields</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=9" title="Edit section: Subfields"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Emotion recognition is probably to gain the best outcome if applying <a href="/wiki/Multimodal_interaction" title="Multimodal interaction">multiple modalities</a> by combining different objects, including <a href="/wiki/Text_(literary_theory)" title="Text (literary theory)">text</a> (conversation), audio, video, and <a href="/wiki/Physiology" title="Physiology">physiology</a> to detect emotions. </p> <div class="mw-heading mw-heading3"><h3 id="Emotion_recognition_in_text">Emotion recognition in text</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=10" title="Edit section: Emotion recognition in text"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Text data is a favorable research object for emotion recognition when it is free and available everywhere in human life. 
Compared to other types of data, the storage of text data is lighter and easier to compress to the best performance due to the frequent repetition of words and characters in languages. Emotions can be extracted from two essential text forms: written texts and <a href="/wiki/Conversation" title="Conversation">conversations</a> (dialogues).<sup id="cite_ref-45" class="reference"><a href="#cite_note-45"><span class="cite-bracket">&#91;</span>45<span class="cite-bracket">&#93;</span></a></sup> For written texts, many scholars focus on working with sentence level to extract "words/phrases" representing emotions.<sup id="cite_ref-46" class="reference"><a href="#cite_note-46"><span class="cite-bracket">&#91;</span>46<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-47" class="reference"><a href="#cite_note-47"><span class="cite-bracket">&#91;</span>47<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Emotion_recognition_in_audio">Emotion recognition in audio</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=11" title="Edit section: Emotion recognition in audio"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Different from emotion recognition in text, vocal signals are used for the recognition to <a href="/wiki/Emotional_speech_recognition" class="mw-redirect" title="Emotional speech recognition">extract emotions from audio</a>.<sup id="cite_ref-48" class="reference"><a href="#cite_note-48"><span class="cite-bracket">&#91;</span>48<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Emotion_recognition_in_video">Emotion recognition in video</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=12" title="Edit section: Emotion 
recognition in video"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Video data is a combination of audio data, image data and sometimes texts (in case of <a href="/wiki/Subtitles" title="Subtitles">subtitles</a><sup id="cite_ref-49" class="reference"><a href="#cite_note-49"><span class="cite-bracket">&#91;</span>49<span class="cite-bracket">&#93;</span></a></sup>). </p> <div class="mw-heading mw-heading3"><h3 id="Emotion_recognition_in_conversation">Emotion recognition in conversation</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=13" title="Edit section: Emotion recognition in conversation"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><a href="/wiki/Emotion_recognition_in_conversation" title="Emotion recognition in conversation">Emotion recognition in conversation</a> (ERC) extracts opinions between participants from massive conversational data in <a href="/wiki/Social_network" title="Social network">social platforms</a>, such as <a href="/wiki/Facebook" title="Facebook">Facebook</a>, <a href="/wiki/Twitter" title="Twitter">Twitter</a>, YouTube, and others.<sup id="cite_ref-:0_29-1" class="reference"><a href="#cite_note-:0-29"><span class="cite-bracket">&#91;</span>29<span class="cite-bracket">&#93;</span></a></sup> ERC can take input data like text, audio, video or a combination form to detect several emotions such as fear, lust, pain, and pleasure. 
</p> <div class="mw-heading mw-heading2"><h2 id="See_also">See also</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=14" title="Edit section: See also"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <ul><li><a href="/wiki/Affective_computing" title="Affective computing">Affective computing</a></li> <li><a href="/wiki/Face_perception" title="Face perception">Face perception</a></li> <li><a href="/wiki/Facial_recognition_system" title="Facial recognition system">Facial recognition system</a></li> <li><a href="/wiki/Sentiment_analysis" title="Sentiment analysis">Sentiment analysis</a></li> <li><a href="/wiki/Interpersonal_accuracy" title="Interpersonal accuracy">Interpersonal accuracy</a></li></ul> <div class="mw-heading mw-heading2"><h2 id="References">References</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Emotion_recognition&amp;action=edit&amp;section=15" title="Edit section: References"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1239543626">.mw-parser-output .reflist{margin-bottom:0.5em;list-style-type:decimal}@media screen{.mw-parser-output .reflist{font-size:90%}}.mw-parser-output .reflist .references{font-size:100%;margin-bottom:0;list-style-type:inherit}.mw-parser-output .reflist-columns-2{column-width:30em}.mw-parser-output .reflist-columns-3{column-width:25em}.mw-parser-output .reflist-columns{margin-top:0.3em}.mw-parser-output .reflist-columns ol{margin-top:0}.mw-parser-output .reflist-columns li{page-break-inside:avoid;break-inside:avoid-column}.mw-parser-output .reflist-upper-alpha{list-style-type:upper-alpha}.mw-parser-output .reflist-upper-roman{list-style-type:upper-roman}.mw-parser-output .reflist-lower-alpha{list-style-type:lower-alpha}.mw-parser-output 
.reflist-lower-greek{list-style-type:lower-greek}.mw-parser-output .reflist-lower-roman{list-style-type:lower-roman}</style><div class="reflist"> <div class="mw-references-wrap mw-references-columns"><ol class="references"> <li id="cite_note-1"><span class="mw-cite-backlink"><b><a href="#cite_ref-1">^</a></b></span> <span class="reference-text">Miyakoshi, Yoshihiro, and Shohei Kato. <a rel="nofollow" class="external text" href="https://ieeexplore.ieee.org/document/5958891">"Facial Emotion Detection Considering Partial Occlusion Of Face Using Baysian Network"</a>. Computers and Informatics (2011): 96–101.</span> </li> <li id="cite_note-2"><span class="mw-cite-backlink"><b><a href="#cite_ref-2">^</a></b></span> <span class="reference-text">Hari Krishna Vydana, P. Phani Kumar, K. Sri Rama Krishna and Anil Kumar Vuppala. <a rel="nofollow" class="external text" href="https://ieeexplore.ieee.org/document/7058214/references">"Improved emotion recognition using GMM-UBMs"</a>. 2015 International Conference on Signal Processing and Communication Engineering Systems</span> </li> <li id="cite_note-3"><span class="mw-cite-backlink"><b><a href="#cite_ref-3">^</a></b></span> <span class="reference-text">B. Schuller, G. Rigoll M. Lang. <a rel="nofollow" class="external text" href="https://ieeexplore.ieee.org/document/1220939/">"Hidden Markov model-based speech emotion recognition"</a>. ICME '03. Proceedings. 
2003 International Conference on Multimedia and Expo, 2003.</span> </li> <li id="cite_note-4"><span class="mw-cite-backlink"><b><a href="#cite_ref-4">^</a></b></span> <span class="reference-text"><style data-mw-deduplicate="TemplateStyles:r1238218222">.mw-parser-output cite.citation{font-style:inherit;word-wrap:break-word}.mw-parser-output .citation q{quotes:"\"""\"""'""'"}.mw-parser-output .citation:target{background-color:rgba(0,127,255,0.133)}.mw-parser-output .id-lock-free.id-lock-free a{background:url("//upload.wikimedia.org/wikipedia/commons/6/65/Lock-green.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-limited.id-lock-limited a,.mw-parser-output .id-lock-registration.id-lock-registration a{background:url("//upload.wikimedia.org/wikipedia/commons/d/d6/Lock-gray-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-subscription.id-lock-subscription a{background:url("//upload.wikimedia.org/wikipedia/commons/a/aa/Lock-red-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .cs1-ws-icon a{background:url("//upload.wikimedia.org/wikipedia/commons/4/4c/Wikisource-logo.svg")right 0.1em center/12px no-repeat}body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-free a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-limited a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-registration a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-subscription a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .cs1-ws-icon a{background-size:contain;padding:0 1em 0 0}.mw-parser-output .cs1-code{color:inherit;background:inherit;border:none;padding:inherit}.mw-parser-output .cs1-hidden-error{display:none;color:var(--color-error,#d33)}.mw-parser-output .cs1-visible-error{color:var(--color-error,#d33)}.mw-parser-output .cs1-maint{display:none;color:#085;margin-left:0.3em}.mw-parser-output .cs1-kern-left{padding-left:0.2em}.mw-parser-output 
.cs1-kern-right{padding-right:0.2em}.mw-parser-output .citation .mw-selflink{font-weight:inherit}@media screen{.mw-parser-output .cs1-format{font-size:95%}html.skin-theme-clientpref-night .mw-parser-output .cs1-maint{color:#18911f}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .cs1-maint{color:#18911f}}</style><cite id="CITEREFSinghSahaSahidullah2021" class="citation book cs1">Singh, Premjeet; Saha, Goutam; Sahidullah, Md (2021). "Non-linear frequency warping using constant-Q transformation for speech emotion recognition". <i>2021 International Conference on Computer Communication and Informatics (ICCCI)</i>. pp.&#160;<span class="nowrap">1–</span>4. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2102.04029">2102.04029</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FICCCI50826.2021.9402569">10.1109/ICCCI50826.2021.9402569</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-7281-5875-4" title="Special:BookSources/978-1-7281-5875-4"><bdi>978-1-7281-5875-4</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:231846518">231846518</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Non-linear+frequency+warping+using+constant-Q+transformation+for+speech+emotion+recognition&amp;rft.btitle=2021+International+Conference+on+Computer+Communication+and+Informatics+%28ICCCI%29&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E1-%3C%2Fspan%3E4&amp;rft.date=2021&amp;rft_id=info%3Aarxiv%2F2102.04029&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A231846518%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1109%2FICCCI50826.2021.9402569&amp;rft.isbn=978-1-7281-5875-4&amp;rft.aulast=Singh&amp;rft.aufirst=Premjeet&amp;rft.au=Saha%2C+Goutam&amp;rft.au=Sahidullah%2C+Md&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-5"><span class="mw-cite-backlink"><b><a href="#cite_ref-5">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFPoriaCambriaBajpaiHussain2017" class="citation journal cs1">Poria, Soujanya; Cambria, Erik; Bajpai, Rajiv; Hussain, Amir (September 2017). <a rel="nofollow" class="external text" href="http://researchrepository.napier.ac.uk/Output/1792429">"A review of affective computing: From unimodal analysis to multimodal fusion"</a>. <i>Information Fusion</i>. <b>37</b>: <span class="nowrap">98–</span>125. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.inffus.2017.02.003">10.1016/j.inffus.2017.02.003</a>. 
<a href="/wiki/Hdl_(identifier)" class="mw-redirect" title="Hdl (identifier)">hdl</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://hdl.handle.net/1893%2F25490">1893/25490</a></span>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:205433041">205433041</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Information+Fusion&amp;rft.atitle=A+review+of+affective+computing%3A+From+unimodal+analysis+to+multimodal+fusion&amp;rft.volume=37&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E98-%3C%2Fspan%3E125&amp;rft.date=2017-09&amp;rft_id=info%3Ahdl%2F1893%2F25490&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A205433041%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1016%2Fj.inffus.2017.02.003&amp;rft.aulast=Poria&amp;rft.aufirst=Soujanya&amp;rft.au=Cambria%2C+Erik&amp;rft.au=Bajpai%2C+Rajiv&amp;rft.au=Hussain%2C+Amir&amp;rft_id=http%3A%2F%2Fresearchrepository.napier.ac.uk%2FOutput%2F1792429&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-6"><span class="mw-cite-backlink"><b><a href="#cite_ref-6">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFCaridakisCastellanoKessousRaouzaiou2007" class="citation book cs1">Caridakis, George; Castellano, Ginevra; Kessous, Loic; Raouzaiou, Amaryllis; Malatesta, Lori; Asteriadis, Stelios; Karpouzis, Kostas (19 September 2007). "Multimodal emotion recognition from expressive faces, body gestures and speech". <i>Artificial Intelligence and Innovations 2007: From Theory to Applications</i>. IFIP the International Federation for Information Processing. Vol.&#160;247. 
pp.&#160;<span class="nowrap">375–</span>388. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-0-387-74161-1_41">10.1007/978-0-387-74161-1_41</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-0-387-74160-4" title="Special:BookSources/978-0-387-74160-4"><bdi>978-0-387-74160-4</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Multimodal+emotion+recognition+from+expressive+faces%2C+body+gestures+and+speech&amp;rft.btitle=Artificial+Intelligence+and+Innovations+2007%3A+From+Theory+to+Applications&amp;rft.series=IFIP+the+International+Federation+for+Information+Processing&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E375-%3C%2Fspan%3E388&amp;rft.date=2007-09-19&amp;rft_id=info%3Adoi%2F10.1007%2F978-0-387-74161-1_41&amp;rft.isbn=978-0-387-74160-4&amp;rft.aulast=Caridakis&amp;rft.aufirst=George&amp;rft.au=Castellano%2C+Ginevra&amp;rft.au=Kessous%2C+Loic&amp;rft.au=Raouzaiou%2C+Amaryllis&amp;rft.au=Malatesta%2C+Lori&amp;rft.au=Asteriadis%2C+Stelios&amp;rft.au=Karpouzis%2C+Kostas&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-7"><span class="mw-cite-backlink"><b><a href="#cite_ref-7">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFPrice2015" class="citation web cs1">Price (23 August 2015). <a rel="nofollow" class="external text" href="https://techcrunch.com/2015/08/23/tapping-into-the-emotional-internet/">"Tapping Into The Emotional Internet"</a>. <i>TechCrunch</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">12 December</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=TechCrunch&amp;rft.atitle=Tapping+Into+The+Emotional+Internet&amp;rft.date=2015-08-23&amp;rft.au=Price&amp;rft_id=https%3A%2F%2Ftechcrunch.com%2F2015%2F08%2F23%2Ftapping-into-the-emotional-internet%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-s1-8"><span class="mw-cite-backlink">^ <a href="#cite_ref-s1_8-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-s1_8-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-s1_8-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-s1_8-3"><sup><i><b>d</b></i></sup></a> <a href="#cite_ref-s1_8-4"><sup><i><b>e</b></i></sup></a> <a href="#cite_ref-s1_8-5"><sup><i><b>f</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFCambria2016" class="citation journal cs1">Cambria, Erik (March 2016). "Affective Computing and Sentiment Analysis". <i>IEEE Intelligent Systems</i>. <b>31</b> (2): <span class="nowrap">102–</span>107. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FMIS.2016.31">10.1109/MIS.2016.31</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:18580557">18580557</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Intelligent+Systems&amp;rft.atitle=Affective+Computing+and+Sentiment+Analysis&amp;rft.volume=31&amp;rft.issue=2&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E102-%3C%2Fspan%3E107&amp;rft.date=2016-03&amp;rft_id=info%3Adoi%2F10.1109%2FMIS.2016.31&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A18580557%23id-name%3DS2CID&amp;rft.aulast=Cambria&amp;rft.aufirst=Erik&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-9"><span class="mw-cite-backlink"><b><a href="#cite_ref-9">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFTaboadaBrookeTofiloskiVoll2011" class="citation journal cs1">Taboada, Maite; Brooke, Julian; Tofiloski, Milan; Voll, Kimberly; Stede, Manfred (June 2011). <a rel="nofollow" class="external text" href="https://doi.org/10.1162%2Fcoli_a_00049">"Lexicon-Based Methods for Sentiment Analysis"</a>. <i>Computational Linguistics</i>. <b>37</b> (2): <span class="nowrap">267–</span>307. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1162%2Fcoli_a_00049">10.1162/coli_a_00049</a></span>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0891-2017">0891-2017</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Computational+Linguistics&amp;rft.atitle=Lexicon-Based+Methods+for+Sentiment+Analysis&amp;rft.volume=37&amp;rft.issue=2&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E267-%3C%2Fspan%3E307&amp;rft.date=2011-06&amp;rft_id=info%3Adoi%2F10.1162%2Fcoli_a_00049&amp;rft.issn=0891-2017&amp;rft.aulast=Taboada&amp;rft.aufirst=Maite&amp;rft.au=Brooke%2C+Julian&amp;rft.au=Tofiloski%2C+Milan&amp;rft.au=Voll%2C+Kimberly&amp;rft.au=Stede%2C+Manfred&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1162%252Fcoli_a_00049&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-SenticNet-10"><span class="mw-cite-backlink"><b><a href="#cite_ref-SenticNet_10-0">^</a></b></span> <span class="reference-text"> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFCambriaLiuDecherchiXing2022" class="citation conference cs1">Cambria, Erik; Liu, Qian; Decherchi, Sergio; Xing, Frank; Kwok, Kenneth (2022). <a rel="nofollow" class="external text" href="https://sentic.net/senticnet-7.pdf">"SenticNet 7: A Commonsense-based Neurosymbolic AI Framework for Explainable Sentiment Analysis"</a> <span class="cs1-format">(PDF)</span>. <i>Proceedings of LREC</i>. 
pp.&#160;<span class="nowrap">3829–</span>3839.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=conference&amp;rft.atitle=SenticNet+7%3A+A+Commonsense-based+Neurosymbolic+AI+Framework+for+Explainable+Sentiment+Analysis&amp;rft.btitle=Proceedings+of+LREC&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E3829-%3C%2Fspan%3E3839&amp;rft.date=2022&amp;rft.aulast=Cambria&amp;rft.aufirst=Erik&amp;rft.au=Liu%2C+Qian&amp;rft.au=Decherchi%2C+Sergio&amp;rft.au=Xing%2C+Frank&amp;rft.au=Kwok%2C+Kenneth&amp;rft_id=https%3A%2F%2Fsentic.net%2Fsenticnet-7.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-11"><span class="mw-cite-backlink"><b><a href="#cite_ref-11">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFBalahurHermidaMontoyo2012" class="citation journal cs1">Balahur, Alexandra; Hermida, JesúS M; Montoyo, AndréS (1 November 2012). <a rel="nofollow" class="external text" href="https://dl.acm.org/citation.cfm?id=2364904">"Detecting implicit expressions of emotion in text: A comparative analysis"</a>. <i>Decision Support Systems</i>. <b>53</b> (4): <span class="nowrap">742–</span>753. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.dss.2012.05.024">10.1016/j.dss.2012.05.024</a>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0167-9236">0167-9236</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Decision+Support+Systems&amp;rft.atitle=Detecting+implicit+expressions+of+emotion+in+text%3A+A+comparative+analysis&amp;rft.volume=53&amp;rft.issue=4&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E742-%3C%2Fspan%3E753&amp;rft.date=2012-11-01&amp;rft_id=info%3Adoi%2F10.1016%2Fj.dss.2012.05.024&amp;rft.issn=0167-9236&amp;rft.aulast=Balahur&amp;rft.aufirst=Alexandra&amp;rft.au=Hermida%2C+Jes%C3%BAS+M&amp;rft.au=Montoyo%2C+Andr%C3%A9S&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fcitation.cfm%3Fid%3D2364904&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-s6-12"><span class="mw-cite-backlink">^ <a href="#cite_ref-s6_12-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-s6_12-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-s6_12-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFMedhatHassanKorashy2014" class="citation journal cs1">Medhat, Walaa; Hassan, Ahmed; Korashy, Hoda (December 2014). <a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.asej.2014.04.011">"Sentiment analysis algorithms and applications: A survey"</a>. <i>Ain Shams Engineering Journal</i>. <b>5</b> (4): <span class="nowrap">1093–</span>1113. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.asej.2014.04.011">10.1016/j.asej.2014.04.011</a></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Ain+Shams+Engineering+Journal&amp;rft.atitle=Sentiment+analysis+algorithms+and+applications%3A+A+survey&amp;rft.volume=5&amp;rft.issue=4&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E1093-%3C%2Fspan%3E1113&amp;rft.date=2014-12&amp;rft_id=info%3Adoi%2F10.1016%2Fj.asej.2014.04.011&amp;rft.aulast=Medhat&amp;rft.aufirst=Walaa&amp;rft.au=Hassan%2C+Ahmed&amp;rft.au=Korashy%2C+Hoda&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1016%252Fj.asej.2014.04.011&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-s3-13"><span class="mw-cite-backlink">^ <a href="#cite_ref-s3_13-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-s3_13-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-s3_13-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFMadhoushiHamdanZainudin2015" class="citation book cs1">Madhoushi, Zohreh; Hamdan, Abdul Razak; Zainudin, Suhaila (2015). "Sentiment analysis techniques in recent works". <i>2015 Science and Information Conference (SAI)</i>. pp.&#160;<span class="nowrap">288–</span>291. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FSAI.2015.7237157">10.1109/SAI.2015.7237157</a>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-4799-8547-0" title="Special:BookSources/978-1-4799-8547-0"><bdi>978-1-4799-8547-0</bdi></a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:14821209">14821209</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Sentiment+analysis+techniques+in+recent+works&amp;rft.btitle=2015+Science+and+Information+Conference+%28SAI%29&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E288-%3C%2Fspan%3E291&amp;rft.date=2015&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A14821209%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1109%2FSAI.2015.7237157&amp;rft.isbn=978-1-4799-8547-0&amp;rft.aulast=Madhoushi&amp;rft.aufirst=Zohreh&amp;rft.au=Hamdan%2C+Abdul+Razak&amp;rft.au=Zainudin%2C+Suhaila&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-14"><span class="mw-cite-backlink"><b><a href="#cite_ref-14">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFHemmatianSohrabi2017" class="citation journal cs1">Hemmatian, Fatemeh; Sohrabi, Mohammad Karim (18 December 2017). "A survey on classification techniques for opinion mining and sentiment analysis". <i>Artificial Intelligence Review</i>. <b>52</b> (3): <span class="nowrap">1495–</span>1545. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10462-017-9599-6">10.1007/s10462-017-9599-6</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:11741285">11741285</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Artificial+Intelligence+Review&amp;rft.atitle=A+survey+on+classification+techniques+for+opinion+mining+and+sentiment+analysis&amp;rft.volume=52&amp;rft.issue=3&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E1495-%3C%2Fspan%3E1545&amp;rft.date=2017-12-18&amp;rft_id=info%3Adoi%2F10.1007%2Fs10462-017-9599-6&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A11741285%23id-name%3DS2CID&amp;rft.aulast=Hemmatian&amp;rft.aufirst=Fatemeh&amp;rft.au=Sohrabi%2C+Mohammad+Karim&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-s4-15"><span class="mw-cite-backlink">^ <a href="#cite_ref-s4_15-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-s4_15-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-s4_15-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFSunLuoChen2017" class="citation journal cs1">Sun, Shiliang; Luo, Chen; Chen, Junyu (July 2017). "A review of natural language processing techniques for opinion mining systems". <i>Information Fusion</i>. <b>36</b>: <span class="nowrap">10–</span>25. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.inffus.2016.10.004">10.1016/j.inffus.2016.10.004</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Information+Fusion&amp;rft.atitle=A+review+of+natural+language+processing+techniques+for+opinion+mining+systems&amp;rft.volume=36&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E10-%3C%2Fspan%3E25&amp;rft.date=2017-07&amp;rft_id=info%3Adoi%2F10.1016%2Fj.inffus.2016.10.004&amp;rft.aulast=Sun&amp;rft.aufirst=Shiliang&amp;rft.au=Luo%2C+Chen&amp;rft.au=Chen%2C+Junyu&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-16"><span class="mw-cite-backlink"><b><a href="#cite_ref-16">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFMajumderPoriaGelbukhCambria2017" class="citation journal cs1">Majumder, Navonil; Poria, Soujanya; Gelbukh, Alexander; Cambria, Erik (March 2017). "Deep Learning-Based Document Modeling for Personality Detection from Text". <i>IEEE Intelligent Systems</i>. <b>32</b> (2): <span class="nowrap">74–</span>79. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FMIS.2017.23">10.1109/MIS.2017.23</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:206468984">206468984</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Intelligent+Systems&amp;rft.atitle=Deep+Learning-Based+Document+Modeling+for+Personality+Detection+from+Text&amp;rft.volume=32&amp;rft.issue=2&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E74-%3C%2Fspan%3E79&amp;rft.date=2017-03&amp;rft_id=info%3Adoi%2F10.1109%2FMIS.2017.23&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A206468984%23id-name%3DS2CID&amp;rft.aulast=Majumder&amp;rft.aufirst=Navonil&amp;rft.au=Poria%2C+Soujanya&amp;rft.au=Gelbukh%2C+Alexander&amp;rft.au=Cambria%2C+Erik&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-17"><span class="mw-cite-backlink"><b><a href="#cite_ref-17">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFMahendhiranKannimuthu2018" class="citation journal cs1">Mahendhiran, P. D.; Kannimuthu, S. (May 2018). "Deep Learning Techniques for Polarity Classification in Multimodal Sentiment Analysis". <i>International Journal of Information Technology &amp; Decision Making</i>. <b>17</b> (3): <span class="nowrap">883–</span>910. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1142%2FS0219622018500128">10.1142/S0219622018500128</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=International+Journal+of+Information+Technology+%26+Decision+Making&amp;rft.atitle=Deep+Learning+Techniques+for+Polarity+Classification+in+Multimodal+Sentiment+Analysis&amp;rft.volume=17&amp;rft.issue=3&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E883-%3C%2Fspan%3E910&amp;rft.date=2018-05&amp;rft_id=info%3Adoi%2F10.1142%2FS0219622018500128&amp;rft.aulast=Mahendhiran&amp;rft.aufirst=P.+D.&amp;rft.au=Kannimuthu%2C+S.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-18"><span class="mw-cite-backlink"><b><a href="#cite_ref-18">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFYuGuiMadaioOgan2017" class="citation book cs1">Yu, Hongliang; Gui, Liangke; Madaio, Michael; Ogan, Amy; Cassell, Justine; Morency, Louis-Philippe (23 October 2017). "Temporally Selective Attention Model for Social and Affective State Recognition in Multimedia Content". <i>Proceedings of the 25th ACM international conference on Multimedia</i>. MM '17. ACM. pp.&#160;<span class="nowrap">1743–</span>1751. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3123266.3123413">10.1145/3123266.3123413</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9781450349062" title="Special:BookSources/9781450349062"><bdi>9781450349062</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:3148578">3148578</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Temporally+Selective+Attention+Model+for+Social+and+Affective+State+Recognition+in+Multimedia+Content&amp;rft.btitle=Proceedings+of+the+25th+ACM+international+conference+on+Multimedia&amp;rft.series=MM+%2717&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E1743-%3C%2Fspan%3E1751&amp;rft.pub=ACM&amp;rft.date=2017-10-23&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A3148578%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1145%2F3123266.3123413&amp;rft.isbn=9781450349062&amp;rft.aulast=Yu&amp;rft.aufirst=Hongliang&amp;rft.au=Gui%2C+Liangke&amp;rft.au=Madaio%2C+Michael&amp;rft.au=Ogan%2C+Amy&amp;rft.au=Cassell%2C+Justine&amp;rft.au=Morency%2C+Louis-Philippe&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-19"><span class="mw-cite-backlink"><b><a href="#cite_ref-19">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFCambriaHussain2015" class="citation book cs1">Cambria, Erik; Hussain, Amir (2015). <a rel="nofollow" class="external text" href="https://dl.acm.org/citation.cfm?id=2878632"><i>Sentic Computing: A Common-Sense-Based Framework for Concept-Level Sentiment Analysis</i></a>. Springer Publishing Company, Incorporated. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-3319236537" title="Special:BookSources/978-3319236537"><bdi>978-3319236537</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Sentic+Computing%3A+A+Common-Sense-Based+Framework+for+Concept-Level+Sentiment+Analysis&amp;rft.pub=Springer+Publishing+Company%2C+Incorporated&amp;rft.date=2015&amp;rft.isbn=978-3319236537&amp;rft.aulast=Cambria&amp;rft.aufirst=Erik&amp;rft.au=Hussain%2C+Amir&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fcitation.cfm%3Fid%3D2878632&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-20"><span class="mw-cite-backlink"><b><a href="#cite_ref-20">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFAraújoGonçalvesChaBenevenuto2014" class="citation book cs1">Araújo, Matheus; Gonçalves, Pollyanna; <a href="/wiki/Cha_Meeyoung" title="Cha Meeyoung">Cha, Meeyoung</a>; Benevenuto, Fabrício (7 April 2014). "IFeel: A system that compares and combines sentiment analysis methods". <i>Proceedings of the 23rd International Conference on World Wide Web</i>. WWW '14 Companion. ACM. pp.&#160;<span class="nowrap">75–</span>78. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F2567948.2577013">10.1145/2567948.2577013</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9781450327459" title="Special:BookSources/9781450327459"><bdi>9781450327459</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:11018367">11018367</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=IFeel%3A+A+system+that+compares+and+combines+sentiment+analysis+methods&amp;rft.btitle=Proceedings+of+the+23rd+International+Conference+on+World+Wide+Web&amp;rft.series=WWW+%2714+Companion&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E75-%3C%2Fspan%3E78&amp;rft.pub=ACM&amp;rft.date=2014-04-07&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A11018367%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1145%2F2567948.2577013&amp;rft.isbn=9781450327459&amp;rft.aulast=Ara%C3%BAjo&amp;rft.aufirst=Matheus&amp;rft.au=Gon%C3%A7alves%2C+Pollyanna&amp;rft.au=Cha%2C+Meeyoung&amp;rft.au=Benevenuto%2C+Fabr%C3%ADcio&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-21"><span class="mw-cite-backlink"><b><a href="#cite_ref-21">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFPaolo_PettaCatherine_PelachaudRoddy_Cowie2011" class="citation book cs1">Paolo Petta; <a href="/wiki/Catherine_Pelachaud" title="Catherine Pelachaud">Catherine Pelachaud</a>; Roddy Cowie, eds. (2011). <i>Emotion-oriented systems the humaine handbook</i>. Berlin: Springer. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-3-642-15184-2" title="Special:BookSources/978-3-642-15184-2"><bdi>978-3-642-15184-2</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Emotion-oriented+systems+the+humaine+handbook&amp;rft.place=Berlin&amp;rft.pub=Springer&amp;rft.date=2011&amp;rft.isbn=978-3-642-15184-2&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-22"><span class="mw-cite-backlink"><b><a href="#cite_ref-22">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFDouglas-CowieCampbellCowieRoach2003" class="citation journal cs1">Douglas-Cowie, Ellen; Campbell, Nick; Cowie, Roddy; Roach, Peter (1 April 2003). <a rel="nofollow" class="external text" href="https://dl.acm.org/citation.cfm?id=772595">"Emotional speech: towards a new generation of databases"</a>. <i>Speech Communication</i>. <b>40</b> (<span class="nowrap">1–</span>2): <span class="nowrap">33–</span>60. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.128.3991">10.1.1.128.3991</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2FS0167-6393%2802%2900070-5">10.1016/S0167-6393(02)00070-5</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0167-6393">0167-6393</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:6421586">6421586</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Speech+Communication&amp;rft.atitle=Emotional+speech%3A+towards+a+new+generation+of+databases&amp;rft.volume=40&amp;rft.issue=%3Cspan+class%3D%22nowrap%22%3E1%E2%80%93%3C%2Fspan%3E2&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E33-%3C%2Fspan%3E60&amp;rft.date=2003-04-01&amp;rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.128.3991%23id-name%3DCiteSeerX&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A6421586%23id-name%3DS2CID&amp;rft.issn=0167-6393&amp;rft_id=info%3Adoi%2F10.1016%2FS0167-6393%2802%2900070-5&amp;rft.aulast=Douglas-Cowie&amp;rft.aufirst=Ellen&amp;rft.au=Campbell%2C+Nick&amp;rft.au=Cowie%2C+Roddy&amp;rft.au=Roach%2C+Peter&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fcitation.cfm%3Fid%3D772595&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-23"><span class="mw-cite-backlink"><b><a href="#cite_ref-23">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFMcKeownValstarCowiePantic2012" class="citation journal cs1">McKeown, G.; Valstar, M.; Cowie, R.; Pantic, M.; Schroder, M. (January 2012). <a rel="nofollow" class="external text" href="https://pure.qub.ac.uk/portal/en/publications/the-semaine-database-annotated-multimodal-records-of-emotionally-colored-conversations-between-a-person-and-a-limited-agent(4f349228-ebb5-4964-be2c-18f3559be29f).html">"The SEMAINE Database: Annotated Multimodal Records of Emotionally Colored Conversations between a Person and a Limited Agent"</a>. <i>IEEE Transactions on Affective Computing</i>. 
<b>3</b> (1): <span class="nowrap">5–</span>17. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FT-AFFC.2011.20">10.1109/T-AFFC.2011.20</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:2995377">2995377</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Transactions+on+Affective+Computing&amp;rft.atitle=The+SEMAINE+Database%3A+Annotated+Multimodal+Records+of+Emotionally+Colored+Conversations+between+a+Person+and+a+Limited+Agent&amp;rft.volume=3&amp;rft.issue=1&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E5-%3C%2Fspan%3E17&amp;rft.date=2012-01&amp;rft_id=info%3Adoi%2F10.1109%2FT-AFFC.2011.20&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A2995377%23id-name%3DS2CID&amp;rft.aulast=McKeown&amp;rft.aufirst=G.&amp;rft.au=Valstar%2C+M.&amp;rft.au=Cowie%2C+R.&amp;rft.au=Pantic%2C+M.&amp;rft.au=Schroder%2C+M.&amp;rft_id=https%3A%2F%2Fpure.qub.ac.uk%2Fportal%2Fen%2Fpublications%2Fthe-semaine-database-annotated-multimodal-records-of-emotionally-colored-conversations-between-a-person-and-a-limited-agent%284f349228-ebb5-4964-be2c-18f3559be29f%29.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-24"><span class="mw-cite-backlink"><b><a href="#cite_ref-24">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFBussoBulutLeeKazemzadeh2008" class="citation journal cs1">Busso, Carlos; Bulut, Murtaza; Lee, Chi-Chun; Kazemzadeh, Abe; <a href="/wiki/Emily_Mower_Provost" title="Emily Mower Provost">Mower, Emily</a>; Kim, Samuel; Chang, Jeannette N.; Lee, Sungbok; Narayanan, 
Shrikanth S. (5 November 2008). "IEMOCAP: interactive emotional dyadic motion capture database". <i>Language Resources and Evaluation</i>. <b>42</b> (4): <span class="nowrap">335–</span>359. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10579-008-9076-6">10.1007/s10579-008-9076-6</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1574-020X">1574-020X</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:11820063">11820063</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Language+Resources+and+Evaluation&amp;rft.atitle=IEMOCAP%3A+interactive+emotional+dyadic+motion+capture+database&amp;rft.volume=42&amp;rft.issue=4&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E335-%3C%2Fspan%3E359&amp;rft.date=2008-11-05&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A11820063%23id-name%3DS2CID&amp;rft.issn=1574-020X&amp;rft_id=info%3Adoi%2F10.1007%2Fs10579-008-9076-6&amp;rft.aulast=Busso&amp;rft.aufirst=Carlos&amp;rft.au=Bulut%2C+Murtaza&amp;rft.au=Lee%2C+Chi-Chun&amp;rft.au=Kazemzadeh%2C+Abe&amp;rft.au=Mower%2C+Emily&amp;rft.au=Kim%2C+Samuel&amp;rft.au=Chang%2C+Jeannette+N.&amp;rft.au=Lee%2C+Sungbok&amp;rft.au=Narayanan%2C+Shrikanth+S.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-25"><span class="mw-cite-backlink"><b><a href="#cite_ref-25">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFMartinKotsiaMacqPitas2006" class="citation book cs1">Martin, 
O.; Kotsia, I.; Macq, B.; Pitas, I. (3 April 2006). "The eNTERFACE'05 Audio-Visual Emotion Database". <a rel="nofollow" class="external text" href="https://dl.acm.org/citation.cfm?id=1130193"><i>22nd International Conference on Data Engineering Workshops (ICDEW'06)</i></a>. Icdew '06. IEEE Computer Society. pp.&#160;8–. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FICDEW.2006.145">10.1109/ICDEW.2006.145</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9780769525716" title="Special:BookSources/9780769525716"><bdi>9780769525716</bdi></a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:16185196">16185196</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=The+eNTERFACE%2705+Audio-Visual+Emotion+Database&amp;rft.btitle=22nd+International+Conference+on+Data+Engineering+Workshops+%28ICDEW%2706%29&amp;rft.series=Icdew+%2706&amp;rft.pages=8-&amp;rft.pub=IEEE+Computer+Society&amp;rft.date=2006-04-03&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A16185196%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1109%2FICDEW.2006.145&amp;rft.isbn=9780769525716&amp;rft.aulast=Martin&amp;rft.aufirst=O.&amp;rft.au=Kotsia%2C+I.&amp;rft.au=Macq%2C+B.&amp;rft.au=Pitas%2C+I.&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fcitation.cfm%3Fid%3D1130193&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-26"><span class="mw-cite-backlink"><b><a href="#cite_ref-26">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite 
id="CITEREFKoelstraMuhlSoleymaniLee2012" class="citation journal cs1">Koelstra, Sander; Muhl, Christian; Soleymani, Mohammad; Lee, Jong-Seok; Yazdani, Ashkan; Ebrahimi, Touradj; Pun, Thierry; Nijholt, Anton; Patras, Ioannis (January 2012). "DEAP: A Database for Emotion Analysis Using Physiological Signals". <i>IEEE Transactions on Affective Computing</i>. <b>3</b> (1): <span class="nowrap">18–</span>31. <a href="/wiki/CiteSeerX_(identifier)" class="mw-redirect" title="CiteSeerX (identifier)">CiteSeerX</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.593.8470">10.1.1.593.8470</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FT-AFFC.2011.15">10.1109/T-AFFC.2011.15</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1949-3045">1949-3045</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:206597685">206597685</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Transactions+on+Affective+Computing&amp;rft.atitle=DEAP%3A+A+Database+for+Emotion+Analysis+Using+Physiological+Signals&amp;rft.volume=3&amp;rft.issue=1&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E18-%3C%2Fspan%3E31&amp;rft.date=2012-01&amp;rft_id=https%3A%2F%2Fciteseerx.ist.psu.edu%2Fviewdoc%2Fsummary%3Fdoi%3D10.1.1.593.8470%23id-name%3DCiteSeerX&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A206597685%23id-name%3DS2CID&amp;rft.issn=1949-3045&amp;rft_id=info%3Adoi%2F10.1109%2FT-AFFC.2011.15&amp;rft.aulast=Koelstra&amp;rft.aufirst=Sander&amp;rft.au=Muhl%2C+Christian&amp;rft.au=Soleymani%2C+Mohammad&amp;rft.au=Lee%2C+Jong-Seok&amp;rft.au=Yazdani%2C+Ashkan&amp;rft.au=Ebrahimi%2C+Touradj&amp;rft.au=Pun%2C+Thierry&amp;rft.au=Nijholt%2C+Anton&amp;rft.au=Patras%2C+Ioannis&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-27"><span class="mw-cite-backlink"><b><a href="#cite_ref-27">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFKatsigiannisRamzan2018" class="citation journal cs1">Katsigiannis, Stamos; Ramzan, Naeem (January 2018). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20221101164333/https://myresearchspace.uws.ac.uk/ws/files/1077176/Accepted_Author_Manuscript.pdf">"DREAMER: A Database for Emotion Recognition Through EEG and ECG Signals From Wireless Low-cost Off-the-Shelf Devices"</a> <span class="cs1-format">(PDF)</span>. <i>IEEE Journal of Biomedical and Health Informatics</i>. 
<b>22</b> (1): <span class="nowrap">98–</span>107. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FJBHI.2017.2688239">10.1109/JBHI.2017.2688239</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2168-2194">2168-2194</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/28368836">28368836</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:23477696">23477696</a>. Archived from <a rel="nofollow" class="external text" href="https://myresearchspace.uws.ac.uk/ws/files/1077176/Accepted_Author_Manuscript.pdf">the original</a> <span class="cs1-format">(PDF)</span> on 1 November 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">1 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Journal+of+Biomedical+and+Health+Informatics&amp;rft.atitle=DREAMER%3A+A+Database+for+Emotion+Recognition+Through+EEG+and+ECG+Signals+From+Wireless+Low-cost+Off-the-Shelf+Devices&amp;rft.volume=22&amp;rft.issue=1&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E98-%3C%2Fspan%3E107&amp;rft.date=2018-01&amp;rft.issn=2168-2194&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A23477696%23id-name%3DS2CID&amp;rft_id=info%3Apmid%2F28368836&amp;rft_id=info%3Adoi%2F10.1109%2FJBHI.2017.2688239&amp;rft.aulast=Katsigiannis&amp;rft.aufirst=Stamos&amp;rft.au=Ramzan%2C+Naeem&amp;rft_id=https%3A%2F%2Fmyresearchspace.uws.ac.uk%2Fws%2Ffiles%2F1077176%2FAccepted_Author_Manuscript.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-28"><span class="mw-cite-backlink"><b><a href="#cite_ref-28">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFPoriaHazarikaMajumderNaik2019" class="citation journal cs1">Poria, Soujanya; Hazarika, Devamanyu; Majumder, Navonil; Naik, Gautam; Cambria, Erik; Mihalcea, Rada (2019). "MELD: A Multimodal Multi-Party Dataset for Emotion Recognition in Conversations". <i>Proceedings of the 57th Annual Meeting of the Association for Computational Linguistics</i>. Stroudsburg, PA, USA: Association for Computational Linguistics: <span class="nowrap">527–</span>536. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1810.02508">1810.02508</a></span>. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.18653%2Fv1%2Fp19-1050">10.18653/v1/p19-1050</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:52932143">52932143</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Proceedings+of+the+57th+Annual+Meeting+of+the+Association+for+Computational+Linguistics&amp;rft.atitle=MELD%3A+A+Multimodal+Multi-Party+Dataset+for+Emotion+Recognition+in+Conversations&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E527-%3C%2Fspan%3E536&amp;rft.date=2019&amp;rft_id=info%3Aarxiv%2F1810.02508&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A52932143%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.18653%2Fv1%2Fp19-1050&amp;rft.aulast=Poria&amp;rft.aufirst=Soujanya&amp;rft.au=Hazarika%2C+Devamanyu&amp;rft.au=Majumder%2C+Navonil&amp;rft.au=Naik%2C+Gautam&amp;rft.au=Cambria%2C+Erik&amp;rft.au=Mihalcea%2C+Rada&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-:0-29"><span class="mw-cite-backlink">^ <a href="#cite_ref-:0_29-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:0_29-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text">Poria, S., Majumder, N., Mihalcea, R., &amp; Hovy, E. (2019). <a rel="nofollow" class="external text" href="https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=8764449">Emotion recognition in conversation: Research challenges, datasets, and recent advances</a>. 
IEEE Access, 7, 100943-100953.</span> </li> <li id="cite_note-30"><span class="mw-cite-backlink"><b><a href="#cite_ref-30">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFStappenSchullerLefterCambria2020" class="citation book cs1">Stappen, Lukas; Schuller, Björn; Lefter, Iulia; Cambria, Erik; Kompatsiaris, Ioannis (2020). "Summary of MuSe 2020: Multimodal Sentiment Analysis, Emotion-target Engagement and Trustworthiness Detection in Real-life Media". <i>Proceedings of the 28th ACM International Conference on Multimedia</i>. Seattle, PA, USA: Association for Computing Machinery. pp.&#160;<span class="nowrap">4769–</span>4770. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2004.14858">2004.14858</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3394171.3421901">10.1145/3394171.3421901</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9781450379885" title="Special:BookSources/9781450379885"><bdi>9781450379885</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:222278714">222278714</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Summary+of+MuSe+2020%3A+Multimodal+Sentiment+Analysis%2C+Emotion-target+Engagement+and+Trustworthiness+Detection+in+Real-life+Media&amp;rft.btitle=Proceedings+of+the+28th+ACM+International+Conference+on+Multimedia&amp;rft.place=Seattle%2C+PA%2C+USA&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E4769-%3C%2Fspan%3E4770&amp;rft.pub=Association+for+Computing+Machinery&amp;rft.date=2020&amp;rft_id=info%3Aarxiv%2F2004.14858&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A222278714%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1145%2F3394171.3421901&amp;rft.isbn=9781450379885&amp;rft.aulast=Stappen&amp;rft.aufirst=Lukas&amp;rft.au=Schuller%2C+Bj%C3%B6rn&amp;rft.au=Lefter%2C+Iulia&amp;rft.au=Cambria%2C+Erik&amp;rft.au=Kompatsiaris%2C+Ioannis&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-31"><span class="mw-cite-backlink"><b><a href="#cite_ref-31">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFHo2020" class="citation book cs1">Ho, Vong (2020). <a rel="nofollow" class="external text" href="https://link.springer.com/chapter/10.1007/978-981-15-6168-9_27">"Emotion Recognition for Vietnamese Social Media Text"</a>. <i>Computational Linguistics</i>. Communications in Computer and Information Science. Vol.&#160;1215. pp.&#160;<span class="nowrap">319–</span>333. 
<a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1911.09339">1911.09339</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-981-15-6168-9_27">10.1007/978-981-15-6168-9_27</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-981-15-6167-2" title="Special:BookSources/978-981-15-6167-2"><bdi>978-981-15-6167-2</bdi></a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:208202333">208202333</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Emotion+Recognition+for+Vietnamese+Social+Media+Text&amp;rft.btitle=Computational+Linguistics&amp;rft.series=Communications+in+Computer+and+Information+Science&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E319-%3C%2Fspan%3E333&amp;rft.date=2020&amp;rft_id=info%3Aarxiv%2F1911.09339&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A208202333%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1007%2F978-981-15-6168-9_27&amp;rft.isbn=978-981-15-6167-2&amp;rft.aulast=Ho&amp;rft.aufirst=Vong&amp;rft_id=https%3A%2F%2Flink.springer.com%2Fchapter%2F10.1007%2F978-981-15-6168-9_27&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-32"><span class="mw-cite-backlink"><b><a href="#cite_ref-32">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFArnau-GonzálezKatsigiannisArevalillo-HerráezRamzan2021" 
class="citation journal cs1 cs1-prop-long-vol">Arnau-González, Pablo; Katsigiannis, Stamos; Arevalillo-Herráez, Miguel; Ramzan, Naeem (February 2021). <a rel="nofollow" class="external text" href="https://ieeexplore.ieee.org/document/9361690">"BED: A new dataset for EEG-based biometrics"</a>. <i>IEEE Internet of Things Journal</i>. (Early Access) (15): 1. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FJIOT.2021.3061727">10.1109/JIOT.2021.3061727</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2327-4662">2327-4662</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:233916681">233916681</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Internet+of+Things+Journal&amp;rft.atitle=BED%3A+A+new+dataset+for+EEG-based+biometrics&amp;rft.volume=%28Early+Access%29&amp;rft.issue=15&amp;rft.pages=1&amp;rft.date=2021-02&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A233916681%23id-name%3DS2CID&amp;rft.issn=2327-4662&amp;rft_id=info%3Adoi%2F10.1109%2FJIOT.2021.3061727&amp;rft.aulast=Arnau-Gonz%C3%A1lez&amp;rft.aufirst=Pablo&amp;rft.au=Katsigiannis%2C+Stamos&amp;rft.au=Arevalillo-Herr%C3%A1ez%2C+Miguel&amp;rft.au=Ramzan%2C+Naeem&amp;rft_id=https%3A%2F%2Fieeexplore.ieee.org%2Fdocument%2F9361690&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-33"><span class="mw-cite-backlink"><b><a href="#cite_ref-33">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite 
class="citation web cs1"><a rel="nofollow" class="external text" href="http://www.affectiva.com">"Affectiva"</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Affectiva&amp;rft_id=http%3A%2F%2Fwww.affectiva.com&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-34"><span class="mw-cite-backlink"><b><a href="#cite_ref-34">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFBossettaSchmøkel2023" class="citation journal cs1">Bossetta, Michael; Schmøkel, Rasmus (2 January 2023). <a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F10584609.2022.2128949">"Cross-Platform Emotions and Audience Engagement in Social Media Political Campaigning: Comparing Candidates' Facebook and Instagram Images in the 2020 US Election"</a>. <i>Political Communication</i>. <b>40</b> (1): <span class="nowrap">48–</span>68. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F10584609.2022.2128949">10.1080/10584609.2022.2128949</a></span>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1058-4609">1058-4609</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Political+Communication&amp;rft.atitle=Cross-Platform+Emotions+and+Audience+Engagement+in+Social+Media+Political+Campaigning%3A+Comparing+Candidates%27+Facebook+and+Instagram+Images+in+the+2020+US+Election&amp;rft.volume=40&amp;rft.issue=1&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E48-%3C%2Fspan%3E68&amp;rft.date=2023-01-02&amp;rft_id=info%3Adoi%2F10.1080%2F10584609.2022.2128949&amp;rft.issn=1058-4609&amp;rft.aulast=Bossetta&amp;rft.aufirst=Michael&amp;rft.au=Schm%C3%B8kel%2C+Rasmus&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1080%252F10584609.2022.2128949&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-35"><span class="mw-cite-backlink"><b><a href="#cite_ref-35">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFPeng2021" class="citation journal cs1">Peng, Yilang (January 2021). <a rel="nofollow" class="external text" href="http://journals.sagepub.com/doi/10.1177/1940161220964769">"What Makes Politicians' Instagram Posts Popular? Analyzing Social Media Strategies of Candidates and Office Holders with Computer Vision"</a>. <i>The International Journal of Press/Politics</i>. <b>26</b> (1): <span class="nowrap">143–</span>166. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F1940161220964769">10.1177/1940161220964769</a>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1940-1612">1940-1612</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:225108765">225108765</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+International+Journal+of+Press%2FPolitics&amp;rft.atitle=What+Makes+Politicians%27+Instagram+Posts+Popular%3F+Analyzing+Social+Media+Strategies+of+Candidates+and+Office+Holders+with+Computer+Vision&amp;rft.volume=26&amp;rft.issue=1&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E143-%3C%2Fspan%3E166&amp;rft.date=2021-01&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A225108765%23id-name%3DS2CID&amp;rft.issn=1940-1612&amp;rft_id=info%3Adoi%2F10.1177%2F1940161220964769&amp;rft.aulast=Peng&amp;rft.aufirst=Yilang&amp;rft_id=http%3A%2F%2Fjournals.sagepub.com%2Fdoi%2F10.1177%2F1940161220964769&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-36"><span class="mw-cite-backlink"><b><a href="#cite_ref-36">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFHaimJungblut2021" class="citation journal cs1">Haim, Mario; Jungblut, Marc (15 March 2021). <a rel="nofollow" class="external text" href="https://www.tandfonline.com/doi/full/10.1080/10584609.2020.1753869">"Politicians' Self-depiction and Their News Portrayal: Evidence from 28 Countries Using Visual Computational Analysis"</a>. <i>Political Communication</i>. <b>38</b> (<span class="nowrap">1–</span>2): <span class="nowrap">55–</span>74. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F10584609.2020.1753869">10.1080/10584609.2020.1753869</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1058-4609">1058-4609</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:219481457">219481457</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Political+Communication&amp;rft.atitle=Politicians%27+Self-depiction+and+Their+News+Portrayal%3A+Evidence+from+28+Countries+Using+Visual+Computational+Analysis&amp;rft.volume=38&amp;rft.issue=%3Cspan+class%3D%22nowrap%22%3E1%E2%80%93%3C%2Fspan%3E2&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E55-%3C%2Fspan%3E74&amp;rft.date=2021-03-15&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A219481457%23id-name%3DS2CID&amp;rft.issn=1058-4609&amp;rft_id=info%3Adoi%2F10.1080%2F10584609.2020.1753869&amp;rft.aulast=Haim&amp;rft.aufirst=Mario&amp;rft.au=Jungblut%2C+Marc&amp;rft_id=https%3A%2F%2Fwww.tandfonline.com%2Fdoi%2Ffull%2F10.1080%2F10584609.2020.1753869&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-37"><span class="mw-cite-backlink"><b><a href="#cite_ref-37">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFBossettaSchmøkel2023" class="citation journal cs1">Bossetta, Michael; Schmøkel, Rasmus (2 January 2023). 
<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F10584609.2022.2128949">"Cross-Platform Emotions and Audience Engagement in Social Media Political Campaigning: Comparing Candidates' Facebook and Instagram Images in the 2020 US Election"</a>. <i>Political Communication</i>. <b>40</b> (1): <span class="nowrap">48–</span>68. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F10584609.2022.2128949">10.1080/10584609.2022.2128949</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1058-4609">1058-4609</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Political+Communication&amp;rft.atitle=Cross-Platform+Emotions+and+Audience+Engagement+in+Social+Media+Political+Campaigning%3A+Comparing+Candidates%27+Facebook+and+Instagram+Images+in+the+2020+US+Election&amp;rft.volume=40&amp;rft.issue=1&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E48-%3C%2Fspan%3E68&amp;rft.date=2023-01-02&amp;rft_id=info%3Adoi%2F10.1080%2F10584609.2022.2128949&amp;rft.issn=1058-4609&amp;rft.aulast=Bossetta&amp;rft.aufirst=Michael&amp;rft.au=Schm%C3%B8kel%2C+Rasmus&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1080%252F10584609.2022.2128949&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-38"><span class="mw-cite-backlink"><b><a href="#cite_ref-38">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFWonSteinert-ThrelkeldJoo2017" class="citation book cs1">Won, Donghyeon; Steinert-Threlkeld, Zachary C.; Joo, Jungseock (19 October 2017). 
<a rel="nofollow" class="external text" href="https://doi.org/10.1145/3123266.3123282">"Protest Activity Detection and Perceived Violence Estimation from Social Media Images"</a>. <i>Proceedings of the 25th ACM international conference on Multimedia</i>. MM '17. New York, NY, USA: Association for Computing Machinery. pp.&#160;<span class="nowrap">786–</span>794. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1709.06204">1709.06204</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3123266.3123282">10.1145/3123266.3123282</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-4503-4906-2" title="Special:BookSources/978-1-4503-4906-2"><bdi>978-1-4503-4906-2</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Protest+Activity+Detection+and+Perceived+Violence+Estimation+from+Social+Media+Images&amp;rft.btitle=Proceedings+of+the+25th+ACM+international+conference+on+Multimedia&amp;rft.place=New+York%2C+NY%2C+USA&amp;rft.series=MM+%2717&amp;rft.pages=%3Cspan+class%3D%22nowrap%22%3E786-%3C%2Fspan%3E794&amp;rft.pub=Association+for+Computing+Machinery&amp;rft.date=2017-10-19&amp;rft_id=info%3Aarxiv%2F1709.06204&amp;rft_id=info%3Adoi%2F10.1145%2F3123266.3123282&amp;rft.isbn=978-1-4503-4906-2&amp;rft.aulast=Won&amp;rft.aufirst=Donghyeon&amp;rft.au=Steinert-Threlkeld%2C+Zachary+C.&amp;rft.au=Joo%2C+Jungseock&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1145%2F3123266.3123282&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-39"><span class="mw-cite-backlink"><b><a 
href="#cite_ref-39">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFBushwick" class="citation web cs1">Bushwick, Sophie. <a rel="nofollow" class="external text" href="https://www.scientificamerican.com/article/this-video-watches-you-back/">"This Video Watches You Back"</a>. <i>Scientific American</i><span class="reference-accessdate">. Retrieved <span class="nowrap">27 January</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Scientific+American&amp;rft.atitle=This+Video+Watches+You+Back&amp;rft.aulast=Bushwick&amp;rft.aufirst=Sophie&amp;rft_id=https%3A%2F%2Fwww.scientificamerican.com%2Farticle%2Fthis-video-watches-you-back%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-DeMuth-40"><span class="mw-cite-backlink">^ <a href="#cite_ref-DeMuth_40-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-DeMuth_40-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFDeMuth_Jr.2016" class="citation news cs1">DeMuth Jr., Chris (8 January 2016). <a rel="nofollow" class="external text" href="https://seekingalpha.com/article/3798766-apple-reads-your-mind">"Apple Reads Your Mind"</a>. <i>M&amp;A Daily</i>. Seeking Alpha<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 January</span> 2016</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=M%26A+Daily&amp;rft.atitle=Apple+Reads+Your+Mind&amp;rft.date=2016-01-08&amp;rft.aulast=DeMuth+Jr.&amp;rft.aufirst=Chris&amp;rft_id=http%3A%2F%2Fseekingalpha.com%2Farticle%2F3798766-apple-reads-your-mind&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-41"><span class="mw-cite-backlink"><b><a href="#cite_ref-41">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite class="citation web cs1"><a rel="nofollow" class="external text" href="http://www.nviso.ch">"nViso"</a>. <i>nViso.ch</i>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=nViso.ch&amp;rft.atitle=nViso&amp;rft_id=http%3A%2F%2Fwww.nviso.ch&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-42"><span class="mw-cite-backlink"><b><a href="#cite_ref-42">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://visagetechnologies.com/products-and-services/visagesdk/faceanalysis/">"Visage Technologies"</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Visage+Technologies&amp;rft_id=https%3A%2F%2Fvisagetechnologies.com%2Fproducts-and-services%2Fvisagesdk%2Ffaceanalysis%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-43"><span class="mw-cite-backlink"><b><a href="#cite_ref-43">^</a></b></span> <span 
class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.cnet.com/roadshow/news/eyeris-emovu-detects-driver-emotions/">"Feeling sad, angry? Your future car will know"</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Feeling+sad%2C+angry%3F+Your+future+car+will+know&amp;rft_id=http%3A%2F%2Fwww.cnet.com%2Froadshow%2Fnews%2Feyeris-emovu-detects-driver-emotions%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-44"><span class="mw-cite-backlink"><b><a href="#cite_ref-44">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222" /><cite id="CITEREFVaragur2016" class="citation news cs1">Varagur, Krithika (22 March 2016). <a rel="nofollow" class="external text" href="http://www.huffingtonpost.com/entry/drowsy-driving-warning-system_us_56eadd1be4b09bf44a9c96aa">"Cars May Soon Warn Drivers Before They Nod Off"</a>. <i>Huffington Post</i>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Huffington+Post&amp;rft.atitle=Cars+May+Soon+Warn+Drivers+Before+They+Nod+Off&amp;rft.date=2016-03-22&amp;rft.aulast=Varagur&amp;rft.aufirst=Krithika&amp;rft_id=http%3A%2F%2Fwww.huffingtonpost.com%2Fentry%2Fdrowsy-driving-warning-system_us_56eadd1be4b09bf44a9c96aa&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AEmotion+recognition" class="Z3988"></span></span> </li> <li id="cite_note-45"><span class="mw-cite-backlink"><b><a href="#cite_ref-45">^</a></b></span> <span class="reference-text">Shivhare, S. N., &amp; Khethawat, S. (2012). Emotion detection from text. 
arXiv preprint <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<a rel="nofollow" class="external text" href="https://arxiv.org/abs/1205.4944">1205.4944</a></span> </li> <li id="cite_note-46"><span class="mw-cite-backlink"><b><a href="#cite_ref-46">^</a></b></span> <span class="reference-text">Ezhilarasi, R., &amp; Minu, R. I. (2012). <a rel="nofollow" class="external text" href="https://www.researchgate.net/profile/R_I_Minu/publication/255704866_Automatic_Emotion_Recognition_and_Classification/links/567f92d308ae19758389f82e.pdf">Automatic emotion recognition and classification</a>. Procedia Engineering, 38, 21-26.</span> </li> <li id="cite_note-47"><span class="mw-cite-backlink"><b><a href="#cite_ref-47">^</a></b></span> <span class="reference-text">Krcadinac, U., Pasquier, P., Jovanovic, J., &amp; Devedzic, V. (2013). <a rel="nofollow" class="external text" href="https://www.krcadinac.com/download/synesketch/research/2013-Synesketch_An_Open_Source_Library_for_Sentence-based_Emotion_Recognition-Krcadinac_Pasquier_Jovanovic_and_Devedzic.pdf">Synesketch: An open source library for sentence-based emotion recognition</a>. IEEE Transactions on Affective Computing, 4(3), 312-325.</span> </li> <li id="cite_note-48"><span class="mw-cite-backlink"><b><a href="#cite_ref-48">^</a></b></span> <span class="reference-text">Schmitt, M., Ringeval, F., &amp; Schuller, B. W. (2016, September). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200215183308/https://pdfs.semanticscholar.org/7ebf/51a3bff0834a33e3313bd51c0c7d7ac50fc2.pdf">At the Border of Acoustics and Linguistics: Bag-of-Audio-Words for the Recognition of Emotions in Speech</a>. In Interspeech (pp. 495-499).</span> </li> <li id="cite_note-49"><span class="mw-cite-backlink"><b><a href="#cite_ref-49">^</a></b></span> <span class="reference-text">Dhall, A., Goecke, R., Lucey, S., &amp; Gedeon, T. (2012). 
<a rel="nofollow" class="external text" href="https://www.researchgate.net/profile/Abhinav_Dhall/publication/256575277_Collecting_Large_Richly_Annotated_Facial-Expression_Databases_from_Movies/links/00b4953b27b9057001000000/Collecting-Large-Richly-Annotated-Facial-Expression-Databases-from-Movies.pdf">Collecting large, richly annotated facial-expression databases from movies</a>. IEEE multimedia, (3), 34-41.</span> </li> </ol></div></div> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><style data-mw-deduplicate="TemplateStyles:r1236075235">.mw-parser-output .navbox{box-sizing:border-box;border:1px solid #a2a9b1;width:100%;clear:both;font-size:88%;text-align:center;padding:1px;margin:1em auto 0}.mw-parser-output .navbox .navbox{margin-top:0}.mw-parser-output .navbox+.navbox,.mw-parser-output .navbox+.navbox-styles+.navbox{margin-top:-1px}.mw-parser-output .navbox-inner,.mw-parser-output .navbox-subgroup{width:100%}.mw-parser-output .navbox-group,.mw-parser-output .navbox-title,.mw-parser-output .navbox-abovebelow{padding:0.25em 1em;line-height:1.5em;text-align:center}.mw-parser-output .navbox-group{white-space:nowrap;text-align:right}.mw-parser-output .navbox,.mw-parser-output .navbox-subgroup{background-color:#fdfdfd}.mw-parser-output .navbox-list{line-height:1.5em;border-color:#fdfdfd}.mw-parser-output .navbox-list-with-group{text-align:left;border-left-width:2px;border-left-style:solid}.mw-parser-output tr+tr>.navbox-abovebelow,.mw-parser-output tr+tr>.navbox-group,.mw-parser-output tr+tr>.navbox-image,.mw-parser-output tr+tr>.navbox-list{border-top:2px solid #fdfdfd}.mw-parser-output .navbox-title{background-color:#ccf}.mw-parser-output .navbox-abovebelow,.mw-parser-output .navbox-group,.mw-parser-output .navbox-subgroup .navbox-title{background-color:#ddf}.mw-parser-output .navbox-subgroup .navbox-group,.mw-parser-output .navbox-subgroup 
.navbox-abovebelow{background-color:#e6e6ff}.mw-parser-output .navbox-even{background-color:#f7f7f7}.mw-parser-output .navbox-odd{background-color:transparent}.mw-parser-output .navbox .hlist td dl,.mw-parser-output .navbox .hlist td ol,.mw-parser-output .navbox .hlist td ul,.mw-parser-output .navbox td.hlist dl,.mw-parser-output .navbox td.hlist ol,.mw-parser-output .navbox td.hlist ul{padding:0.125em 0}.mw-parser-output .navbox .navbar{display:block;font-size:100%}.mw-parser-output .navbox-title .navbar{float:left;text-align:left;margin-right:0.5em}body.skin--responsive .mw-parser-output .navbox-image img{max-width:none!important}@media print{body.ns-0 .mw-parser-output .navbox{display:none!important}}</style></div><div role="navigation" class="navbox" aria-labelledby="Emotions_(list)282" style="padding:3px"><table class="nowraplinks mw-collapsible autocollapse navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="3"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239400231" /><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Emotion_navbox" title="Template:Emotion navbox"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Emotion_navbox" title="Template talk:Emotion navbox"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Emotion_navbox" title="Special:EditPage/Template:Emotion navbox"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Emotions_(list)282" style="font-size:114%;margin:0 4em"><a href="/wiki/Emotion" title="Emotion">Emotions</a> (<a href="/wiki/Emotion_classification" title="Emotion classification">list</a>)</div></th></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a 
href="/wiki/Emotion" title="Emotion">Emotions</a></th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Acceptance" title="Acceptance">Acceptance</a></li> <li><a href="/wiki/Admiration" title="Admiration">Admiration</a></li> <li><a href="/wiki/Adoration" title="Adoration">Adoration</a></li> <li><a href="/wiki/Aesthetic_emotions" title="Aesthetic emotions">Aesthetic</a></li> <li><a href="/wiki/Affection" title="Affection">Affection</a></li> <li><a href="/wiki/Psychomotor_agitation" title="Psychomotor agitation">Agitation</a></li> <li><a href="/wiki/Pain" title="Pain">Agony</a></li> <li><a href="/wiki/Amusement" title="Amusement">Amusement</a></li> <li><a href="/wiki/Anger" title="Anger">Anger</a></li> <li><a href="/wiki/Angst" title="Angst">Angst</a></li> <li><a href="/wiki/Anguish" title="Anguish">Anguish</a></li> <li><a href="/wiki/Annoyance" title="Annoyance">Annoyance</a></li> <li><a href="/wiki/Anticipation" title="Anticipation">Anticipation</a></li> <li><a href="/wiki/Antipathy" title="Antipathy">Antipathy</a></li> <li><a href="/wiki/Anxiety" title="Anxiety">Anxiety</a></li> <li><a href="/wiki/Apathy" title="Apathy">Apathy</a></li> <li><a href="/wiki/Arousal" title="Arousal">Arousal</a></li> <li><a href="/wiki/Interpersonal_attraction" title="Interpersonal attraction">Attraction</a></li> <li><a href="/wiki/Awe" title="Awe">Awe</a></li> <li><a href="/wiki/Belongingness" title="Belongingness">Belongingness</a></li> <li><a href="/wiki/Boredom" title="Boredom">Boredom</a></li> <li><a href="/wiki/Calmness" title="Calmness">Calmness</a></li> <li><a href="/wiki/Comfort" title="Comfort">Comfort</a></li> <li><a href="/wiki/Compassion" title="Compassion">Compassion</a></li> <li><a href="/wiki/Confidence" title="Confidence">Confidence</a></li> <li><a href="/wiki/Confusion" title="Confusion">Confusion</a></li> <li><a href="/wiki/Contempt" title="Contempt">Contempt</a></li> 
<li><a href="/wiki/Contentment" title="Contentment">Contentment</a></li> <li><a href="/wiki/Courage" title="Courage">Courage</a></li> <li><a href="/wiki/Cruelty" title="Cruelty">Cruelty</a></li> <li><a href="/wiki/Curiosity" title="Curiosity">Curiosity</a></li> <li><a href="/wiki/Defeatism" title="Defeatism">Defeat</a></li> <li><a href="/wiki/Depression_(mood)" title="Depression (mood)">Depression</a></li> <li><a href="/wiki/Desire" title="Desire">Desire</a></li> <li><a href="/wiki/Disappointment" title="Disappointment">Disappointment</a></li> <li><a href="/wiki/Disgust" title="Disgust">Disgust</a></li> <li><a href="/wiki/Distrust" title="Distrust">Distrust</a></li> <li><a href="/wiki/Doubt" title="Doubt">Doubt</a></li> <li><a href="/wiki/Ecstasy_(emotion)" title="Ecstasy (emotion)">Ecstasy</a></li> <li><a href="/wiki/Embarrassment" title="Embarrassment">Embarrassment</a> <ul><li><a href="/wiki/Vicarious_embarrassment" title="Vicarious embarrassment">vicarious</a></li></ul></li> <li><a href="/wiki/Emotion_work" title="Emotion work">Emotion work</a></li> <li><a href="/wiki/Empathy" title="Empathy">Empathy</a></li> <li><a href="/wiki/Emptiness" title="Emptiness">Emptiness</a></li> <li><a href="/wiki/Attention" title="Attention">Enthrallment</a></li> <li><a href="/wiki/Enthusiasm" title="Enthusiasm">Enthusiasm</a></li> <li><a href="/wiki/Envy" title="Envy">Envy</a></li> <li><a href="/wiki/Euphoria" title="Euphoria">Euphoria</a></li> <li><a href="/wiki/Stimulation" title="Stimulation">Excitement</a></li> <li><a href="/wiki/Faith" title="Faith">Faith</a></li> <li><a href="/wiki/Fear" title="Fear">Fear</a></li> <li><a href="/wiki/Flow_(psychology)" title="Flow (psychology)">Flow</a></li> <li><a href="/wiki/Frustration" title="Frustration">Frustration</a></li> <li><a href="/wiki/Fun" title="Fun">Fun</a></li> <li><a href="/wiki/Gratification" title="Gratification">Gratification</a></li> <li><a href="/wiki/Gratitude" title="Gratitude">Gratitude</a></li> <li><a 
href="/wiki/Greed" title="Greed">Greed</a></li> <li><a href="/wiki/Grief" title="Grief">Grief</a></li> <li><a href="/wiki/Guilt_(emotion)" title="Guilt (emotion)">Guilt</a></li> <li><a href="/wiki/Happiness" title="Happiness">Happiness</a> <ul><li><i><a href="/wiki/Joie_de_vivre" title="Joie de vivre">Joie de vivre</a></i></li></ul></li> <li><a href="/wiki/Hatred" title="Hatred">Hatred</a> <ul><li><a href="/wiki/Self-hatred" title="Self-hatred">self-hatred</a></li></ul></li> <li><span title="Welsh-language text"><i lang="cy"><a href="/wiki/Hiraeth" title="Hiraeth">Hiraeth</a></i></span></li> <li><a href="/wiki/Homesickness" title="Homesickness">Homesickness</a></li> <li><a href="/wiki/Hope" title="Hope">Hope</a></li> <li><a href="/wiki/Horror_and_terror" title="Horror and terror">Horror</a></li> <li><a href="/wiki/Hostility" title="Hostility">Hostility</a></li> <li><a href="/wiki/Humiliation" title="Humiliation">Humiliation</a></li> <li><span title="Danish-language text"><i lang="da"><a href="/wiki/Hygge" title="Hygge">Hygge</a></i></span></li> <li><a href="/wiki/Hysteria" title="Hysteria">Hysteria</a></li> <li><a href="/wiki/Ikigai" title="Ikigai"><i>Ikigai</i> (sense of purpose)</a></li> <li><a href="/wiki/Hedonism" title="Hedonism">Indulgence</a></li> <li><a href="/wiki/Infatuation" title="Infatuation">Infatuation</a></li> <li><a href="/wiki/Insecurity_(emotion)" title="Insecurity (emotion)">Insecurity</a></li> <li><a href="/wiki/Insignificance" title="Insignificance">Insignificance</a></li> <li><a href="/wiki/Artistic_inspiration" title="Artistic inspiration">Inspiration</a></li> <li><a href="/wiki/Interest_(emotion)" title="Interest (emotion)">Interest</a></li> <li><a href="/wiki/Irritability" title="Irritability">Irritation</a></li> <li><a href="/wiki/Isolation_(psychology)" title="Isolation (psychology)">Isolation</a></li> <li><a href="/wiki/Jealousy" title="Jealousy">Jealousy</a></li> <li><a href="/wiki/Joy" title="Joy">Joy</a></li> <li><a 
href="/wiki/Kindness" title="Kindness">Kindness</a></li> <li><a href="/wiki/Loneliness" title="Loneliness">Loneliness</a></li> <li><a href="/wiki/Love" title="Love">Love</a> <ul><li><a href="/wiki/Limerence" title="Limerence">limerence</a></li> <li><a href="/wiki/Love_at_first_sight" title="Love at first sight">at first sight</a></li></ul></li> <li><a href="/wiki/Lust" title="Lust">Lust</a></li> <li><span title="Japanese-language text"><i lang="ja-Latn"><a href="/wiki/Mono_no_aware" title="Mono no aware">Mono no aware</a></i></span></li> <li><a href="/wiki/Neglect" title="Neglect">Neglect</a></li> <li><a href="/wiki/Nostalgia" title="Nostalgia">Nostalgia</a></li> <li><a href="/wiki/Outrage_(emotion)" title="Outrage (emotion)">Outrage</a></li> <li><a href="/wiki/Panic" title="Panic">Panic</a></li> <li><a href="/wiki/Passion_(emotion)" title="Passion (emotion)">Passion</a></li> <li><a href="/wiki/Pity" title="Pity">Pity</a> <ul><li><a href="/wiki/Self-pity" title="Self-pity">self-pity</a></li></ul></li> <li><a href="/wiki/Pleasure" title="Pleasure">Pleasure</a></li> <li><a href="/wiki/Pride" title="Pride">Pride</a> <ul><li><a href="/wiki/Grandiosity" title="Grandiosity">grandiosity</a></li> <li><a href="/wiki/Hubris" title="Hubris">hubris</a></li> <li><a href="/wiki/Insult" title="Insult">insult</a></li> <li><a href="/wiki/Vanity" title="Vanity">vanity</a></li></ul></li> <li><a href="/wiki/Rage_(emotion)" title="Rage (emotion)">Rage</a></li> <li><a href="/wiki/Regret" title="Regret">Regret</a></li> <li><a href="/wiki/Social_rejection" title="Social rejection">Rejection</a></li> <li><a href="/wiki/Relaxation_(psychology)" title="Relaxation (psychology)">Relaxation</a></li> <li><a href="/wiki/Relief_(emotion)" title="Relief (emotion)">Relief</a></li> <li><a href="/wiki/Remorse" title="Remorse">Remorse</a></li> <li><a href="/wiki/Resentment" title="Resentment">Resentment</a></li> <li><a href="/wiki/Revenge" title="Revenge">Revenge</a></li> <li><a href="/wiki/Sadness" 
title="Sadness">Sadness</a> <ul><li><a href="/wiki/Melancholia" title="Melancholia">melancholy</a></li></ul></li> <li><span title="Portuguese-language text"><i lang="pt"><a href="/wiki/Saudade" title="Saudade">Saudade</a></i></span></li> <li><span title="German-language text"><i lang="de"><a href="/wiki/Schadenfreude" title="Schadenfreude">Schadenfreude</a></i></span></li> <li><span title="German-language text"><i lang="de"><a href="/wiki/Sehnsucht" title="Sehnsucht">Sehnsucht</a></i></span></li> <li><a href="/wiki/Sentimentality" title="Sentimentality">Sentimentality</a></li> <li><a href="/wiki/Shame" title="Shame">Shame</a></li> <li><a href="/wiki/Acute_stress_reaction" title="Acute stress reaction">Shock</a></li> <li><a href="/wiki/Shyness" title="Shyness">Shyness</a></li> <li><a href="/wiki/Solitude" title="Solitude">Solitude</a></li> <li><a href="/wiki/Social_connection" title="Social connection">Social connection</a></li> <li><a href="/wiki/Sorrow_(emotion)" title="Sorrow (emotion)">Sorrow</a></li> <li><a href="/wiki/Spite_(sentiment)" title="Spite (sentiment)">Spite</a></li> <li><a href="/wiki/Psychological_stress" title="Psychological stress">Stress</a> <ul><li><a href="/wiki/Chronic_stress" title="Chronic stress">chronic</a></li></ul></li> <li><a href="/wiki/Suffering" title="Suffering">Suffering</a></li> <li><a href="/wiki/Surprise_(emotion)" title="Surprise (emotion)">Surprise</a></li> <li><a href="/wiki/Suspense" title="Suspense">Suspense</a></li> <li><a href="/wiki/Suspicion_(emotion)" title="Suspicion (emotion)">Suspicion</a></li> <li><a href="/wiki/Sympathy" title="Sympathy">Sympathy</a></li> <li><a href="/wiki/Trust_(social_science)" title="Trust (social science)">Trust</a></li> <li><a href="/wiki/Wonder_(emotion)" title="Wonder (emotion)">Wonder</a> <ul><li><a href="/wiki/Sense_of_wonder" title="Sense of wonder">sense of wonder</a></li></ul></li> <li><a href="/wiki/Worry" title="Worry">Worry</a></li> <li><a href="/wiki/Zest_(positive_psychology)" 
title="Zest (positive psychology)">Zest</a></li></ul> </div></td><td class="noviewer navbox-image" rowspan="3" style="width:1px;padding:0 0 0 2px"><div><span typeof="mw:File"><a href="/wiki/File:Plutchik-wheel.svg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/c/ce/Plutchik-wheel.svg/120px-Plutchik-wheel.svg.png" decoding="async" width="90" height="91" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/c/ce/Plutchik-wheel.svg/250px-Plutchik-wheel.svg.png 1.5x" data-file-width="715" data-file-height="725" /></a></span><br /><span typeof="mw:File"><a href="/wiki/File:Plutchik_Dyads.svg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/a/ad/Plutchik_Dyads.svg/90px-Plutchik_Dyads.svg.png" decoding="async" width="90" height="90" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/a/ad/Plutchik_Dyads.svg/135px-Plutchik_Dyads.svg.png 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/a/ad/Plutchik_Dyads.svg/180px-Plutchik_Dyads.svg.png 2x" data-file-width="1357" data-file-height="1356" /></a></span></div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Worldview" title="Worldview">Worldviews</a></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Cynicism_(contemporary)" title="Cynicism (contemporary)">Cynicism</a></li> <li><a href="/wiki/Defeatism" title="Defeatism">Defeatism</a></li> <li><a href="/wiki/Fatalism" title="Fatalism">Fatalism</a></li> <li><a href="/wiki/Misanthropy" title="Misanthropy">Misanthropy</a></li> <li><a href="/wiki/Nihilism" title="Nihilism">Nihilism</a></li> <li><a href="/wiki/Optimism" title="Optimism">Optimism</a></li> <li><a href="/wiki/Pessimism" title="Pessimism">Pessimism</a></li> <li><a href="/wiki/Recluse" title="Recluse">Reclusion</a></li> <li><span title="German-language 
text"><i lang="de"><a href="/wiki/Weltschmerz" title="Weltschmerz">Weltschmerz</a></i></span></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Related</th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li>Affect <ul><li><a href="/wiki/Affect_consciousness" title="Affect consciousness">consciousness</a></li> <li><a href="/wiki/Affect_(education)" title="Affect (education)">in education</a></li> <li><a href="/wiki/Affect_measures" title="Affect measures">measures</a></li> <li><a href="/wiki/Affect_(psychology)" title="Affect (psychology)">in psychology</a></li></ul></li> <li>Affective <ul><li><a href="/wiki/Affective_computing" title="Affective computing">computing</a></li> <li><a href="/wiki/Affective_forecasting" title="Affective forecasting">forecasting</a></li> <li><a href="/wiki/Affective_neuroscience" title="Affective neuroscience">neuroscience</a></li> <li><a href="/wiki/Affective_science" title="Affective science">science</a></li> <li><a href="/wiki/Affective_spectrum" title="Affective spectrum">spectrum</a></li></ul></li> <li>Affectivity <ul><li><a href="/wiki/Positive_affectivity" title="Positive affectivity">positive</a></li> <li><a href="/wiki/Negative_affectivity" title="Negative affectivity">negative</a></li></ul></li> <li><a href="/wiki/Appeal_to_emotion" title="Appeal to emotion">Appeal to emotion</a></li> <li><a href="/wiki/Amygdala_hijack" title="Amygdala hijack">Amygdala hijack</a></li> <li>Emotion <ul><li><a href="/wiki/Art_and_emotion" title="Art and emotion">and art</a></li> <li><a href="/wiki/Emotion_and_memory" title="Emotion and memory">and memory</a></li> <li><a href="/wiki/Music_and_emotion" title="Music and emotion">and music</a></li> <li><a href="/wiki/Sex_differences_in_psychology" title="Sex differences in psychology">and sex</a></li> <li><a href="/wiki/Sleep_and_emotions" title="Sleep and emotions">and sleep</a></li> 
<li><a href="/wiki/Emotion_classification" title="Emotion classification">classification</a> <ul><li><a href="/wiki/Emotion_classification#Circumplex_model" title="Emotion classification">circumplex</a></li> <li><a href="/wiki/EmojiGrid" title="EmojiGrid">EmojiGrid</a></li> <li><a href="/wiki/L%C3%B6vheim_Cube_of_Emotions" title="Lövheim Cube of Emotions">Lövheim</a></li> <li><a href="/wiki/PAD_emotional_state_model" title="PAD emotional state model">PAD</a></li> <li><a href="/wiki/Emotion_classification#Plutchik&#39;s_wheel_of_emotions" title="Emotion classification">Plutchik</a></li></ul></li> <li><a href="/wiki/Evolution_of_emotion" title="Evolution of emotion">evolution</a></li> <li><a href="/wiki/Expressed_emotion" title="Expressed emotion">expressed</a></li> <li><a href="/wiki/Functional_accounts_of_emotion" title="Functional accounts of emotion">functional accounts</a></li> <li><a href="/wiki/Group_emotion" title="Group emotion">group</a></li> <li><a href="/wiki/Homeostatic_emotion" class="mw-redirect" title="Homeostatic emotion">homeostatic</a></li> <li><a href="/wiki/Emotion_in_animals" title="Emotion in animals">in animals</a></li> <li><a href="/wiki/Emotion_perception" title="Emotion perception">perception</a></li> <li><a class="mw-selflink selflink">recognition</a> <ul><li><a href="/wiki/Emotion_recognition_in_conversation" title="Emotion recognition in conversation">in conversation</a></li></ul></li> <li><a href="/wiki/Emotional_self-regulation" title="Emotional self-regulation">regulation</a> <ul><li><a href="/wiki/Interpersonal_emotion_regulation" title="Interpersonal emotion regulation">interpersonal</a></li></ul></li> <li><a href="/wiki/Emotion_work" title="Emotion work">work</a></li></ul></li> <li>Emotional <ul><li><a href="/wiki/Emotional_aperture" title="Emotional aperture">aperture</a></li> <li><a href="/wiki/Emotional_bias" title="Emotional bias">bias</a></li> <li><a href="/wiki/Emotional_blackmail" title="Emotional 
blackmail">blackmail</a></li> <li><a href="/wiki/Emotional_competence" title="Emotional competence">competence</a></li> <li><a href="/wiki/Emotional_conflict" title="Emotional conflict">conflict</a></li> <li><a href="/wiki/Emotional_contagion" title="Emotional contagion">contagion</a></li> <li><a href="/wiki/Emotional_detachment" title="Emotional detachment">detachment</a></li> <li><a href="/wiki/Emotional_dysregulation" title="Emotional dysregulation">dysregulation</a></li> <li><a href="/wiki/Emotional_eating" title="Emotional eating">eating</a></li> <li><a href="/wiki/Emotional_exhaustion" title="Emotional exhaustion">exhaustion</a></li> <li><a href="/wiki/Emotional_expression" title="Emotional expression">expression</a> <ul><li><a href="/wiki/Gender_and_emotional_expression" title="Gender and emotional expression">and gender</a></li></ul></li> <li><a href="/wiki/Emotional_intelligence" title="Emotional intelligence">intelligence</a> <ul><li><a href="/wiki/Bullying_and_emotional_intelligence" title="Bullying and emotional intelligence">and bullying</a></li> <li><a href="/wiki/Empathy_quotient" title="Empathy quotient">Empathy quotient</a></li></ul></li> <li><a href="/wiki/Emotional_intimacy" title="Emotional intimacy">intimacy</a></li> <li><a href="/wiki/Emotional_isolation" title="Emotional isolation">isolation</a></li> <li><a href="/wiki/Emotional_lability" title="Emotional lability">lability</a></li> <li><a href="/wiki/Emotional_labor" title="Emotional labor">labor</a></li> <li><a href="/wiki/Emotional_lateralization" title="Emotional lateralization">lateralization</a></li> <li><a href="/wiki/Emotional_literacy" title="Emotional literacy">literacy</a></li> <li><a href="/wiki/Emotional_prosody" title="Emotional prosody">prosody</a></li> <li><a href="/wiki/Emotional_reasoning" title="Emotional reasoning">reasoning</a></li> <li><a href="/wiki/Emotional_responsivity" title="Emotional responsivity">responsivity</a></li> <li><a href="/wiki/Emotional_security" 
class="mw-redirect" title="Emotional security">security</a></li> <li><a href="/wiki/Emotional_symbiosis" title="Emotional symbiosis">symbiosis</a></li> <li><a href="/wiki/Emotional_thought_method" title="Emotional thought method">thought method</a></li> <li><a href="/wiki/Emotional_well-being" class="mw-redirect" title="Emotional well-being">well-being</a></li></ul></li> <li><a href="/wiki/Emotionality" title="Emotionality">Emotionality</a> <ul><li><a href="/wiki/Bounded_emotionality" title="Bounded emotionality">bounded</a></li></ul></li> <li>Emotions <ul><li><a href="/wiki/Emotions_and_culture" title="Emotions and culture">and culture</a></li> <li><a href="/wiki/History_of_emotions" title="History of emotions">history</a></li> <li><a href="/wiki/Emotions_in_decision-making" title="Emotions in decision-making">in decision-making</a></li> <li><a href="/wiki/Emotions_in_the_workplace" title="Emotions in the workplace">in the workplace</a></li> <li><a href="/wiki/Emotions_in_virtual_communication" title="Emotions in virtual communication">in virtual communication</a></li> <li><a href="/wiki/Moral_emotions" title="Moral emotions">moral</a></li> <li><a href="/wiki/Self-conscious_emotions" title="Self-conscious emotions">self-conscious</a></li> <li><a href="/wiki/Social_emotions" title="Social emotions">social</a></li> <li><a href="/wiki/Social_sharing_of_emotions" title="Social sharing of emotions">social sharing</a></li> <li><a href="/wiki/Sociology_of_emotions" title="Sociology of emotions">sociology</a></li></ul></li> <li><a href="/wiki/Feeling" title="Feeling">Feeling</a></li> <li><a href="/wiki/Group_affective_tone" title="Group affective tone">Group affective tone</a></li> <li><a href="/wiki/Interactions_between_the_emotional_and_executive_brain_systems" title="Interactions between the emotional and executive brain systems">Interactions between the emotional and executive brain systems</a></li> <li><a href="/wiki/Jealousy_in_art" title="Jealousy in art">Jealousy 
in art</a></li> <li><a href="/wiki/Mental_state" title="Mental state">Mental state</a></li> <li><a href="/wiki/Meta-emotion" title="Meta-emotion">Meta-emotion</a></li> <li><a href="/wiki/Pathognomy" title="Pathognomy">Pathognomy</a></li> <li><a href="/wiki/Pathos" title="Pathos">Pathos</a></li> <li><a href="/wiki/Social_emotional_development" title="Social emotional development">Social emotional development</a></li> <li><a href="/wiki/Stoic_passions" title="Stoic passions">Stoic passions</a></li> <li>Theory <ul><li><a href="/wiki/Affect_theory" title="Affect theory">affect</a></li> <li><a href="/wiki/Affect_as_information_hypothesis" title="Affect as information hypothesis">affect as information</a></li> <li><a href="/wiki/Appraisal_theory" title="Appraisal theory">appraisal</a></li> <li><a href="/wiki/Cannon-Bard_theory" class="mw-redirect" title="Cannon-Bard theory">Cannon-Bard</a></li> <li><a href="/wiki/Theory_of_constructed_emotion" title="Theory of constructed emotion">constructed emotion</a></li> <li><a href="/wiki/Discrete_emotion_theory" title="Discrete emotion theory">discrete emotion</a></li> <li><a href="/wiki/James-Lange_theory" class="mw-redirect" title="James-Lange theory">James-Lange</a></li> <li><a href="/wiki/Somatic_theory" title="Somatic theory">somatic</a></li> <li><a href="/wiki/Somatic_marker_hypothesis" title="Somatic marker hypothesis">somatic marker</a></li> <li><a href="/wiki/Two-factor_theory_of_emotion" title="Two-factor theory of emotion">two-factor</a></li></ul></li></ul> </div></td></tr><tr><td class="navbox-abovebelow" colspan="3"><div><i>Italics</i> indicate emotion names in foreign languages <div style="clear:both;" class=""></div> <span class="noviewer" typeof="mw:File"><span title="Category"><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/16px-Symbol_category_class.svg.png" decoding="async" width="16" height="16" class="mw-file-element" 
srcset="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/23px-Symbol_category_class.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/31px-Symbol_category_class.svg.png 2x" data-file-width="180" data-file-height="185" /></span></span> <a href="/wiki/Category:Emotion" title="Category:Emotion">Category</a></div></td></tr></tbody></table></div> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236075235" /><style data-mw-deduplicate="TemplateStyles:r1066933788">.mw-parser-output .excerpt-hat .mw-editsection-like{font-style:normal}</style></div><div role="navigation" class="navbox" aria-labelledby="Psychology908" style="padding:3px"><table class="nowraplinks hlist mw-collapsible autocollapse navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="3" style="background:#BFD7FF;"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239400231" /><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Psychology" title="Template:Psychology"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Psychology" title="Template talk:Psychology"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Psychology" title="Special:EditPage/Template:Psychology"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Psychology908" style="font-size:114%;margin:0 4em"><a href="/wiki/Psychology" title="Psychology">Psychology</a></div></th></tr><tr><td class="navbox-abovebelow" colspan="3" style="background:#DFEBFF;"><div> <ul><li><a 
href="/wiki/History_of_psychology" title="History of psychology">History</a></li> <li><a href="/wiki/Philosophy_of_psychology" title="Philosophy of psychology">Philosophy</a></li> <li><a href="/wiki/Portal:Psychology" title="Portal:Psychology">Portal</a></li> <li><a href="/wiki/Psychologist" title="Psychologist">Psychologist</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%;background:#CFE1FF;"><a href="/wiki/Basic_science_(psychology)" title="Basic science (psychology)">Basic <br />psychology</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Abnormal_psychology" title="Abnormal psychology">Abnormal</a></li> <li><a href="/wiki/Affective_neuroscience" title="Affective neuroscience">Affective neuroscience</a></li> <li><a href="/wiki/Affective_science" title="Affective science">Affective science</a></li> <li><a href="/wiki/Behavioural_genetics" title="Behavioural genetics">Behavioral genetics</a></li> <li><a href="/wiki/Behavioral_neuroscience" title="Behavioral neuroscience">Behavioral neuroscience</a></li> <li><a href="/wiki/Behaviorism" title="Behaviorism">Behaviorism</a></li> <li><a href="/wiki/Cognitive_psychology" title="Cognitive psychology">Cognitive</a>/<a href="/wiki/Cognitivism_(psychology)" title="Cognitivism (psychology)">Cognitivism</a></li> <li><a href="/wiki/Cognitive_neuroscience" title="Cognitive neuroscience">Cognitive neuroscience</a> <ul><li><a href="/wiki/Social_cognitive_neuroscience" title="Social cognitive neuroscience">Social</a></li></ul></li> <li><a href="/wiki/Comparative_psychology" title="Comparative psychology">Comparative</a></li> <li><a href="/wiki/Cross-cultural_psychology" title="Cross-cultural psychology">Cross-cultural</a></li> <li><a href="/wiki/Cultural_psychology" title="Cultural psychology">Cultural</a></li> <li><a href="/wiki/Developmental_psychology" title="Developmental 
psychology">Developmental</a></li> <li><a href="/wiki/Differential_psychology" title="Differential psychology">Differential</a></li> <li><a href="/wiki/Ecological_psychology" title="Ecological psychology">Ecological</a></li> <li><a href="/wiki/Evolutionary_psychology" title="Evolutionary psychology">Evolutionary</a></li> <li><a href="/wiki/Experimental_psychology" title="Experimental psychology">Experimental</a></li> <li><a href="/wiki/Gestalt_psychology" title="Gestalt psychology">Gestalt</a></li> <li><a href="/wiki/Intelligence" title="Intelligence">Intelligence</a></li> <li><a href="/wiki/Mathematical_psychology" title="Mathematical psychology">Mathematical</a></li> <li><a href="/wiki/Moral_psychology" title="Moral psychology">Moral</a></li> <li><a href="/wiki/Neuropsychology" title="Neuropsychology">Neuropsychology</a></li> <li><a href="/wiki/Perception" title="Perception">Perception</a></li> <li><a href="/wiki/Personality_psychology" title="Personality psychology">Personality</a></li> <li><a href="/wiki/Psycholinguistics" title="Psycholinguistics">Psycholinguistics</a></li> <li><a href="/wiki/Psychophysiology" title="Psychophysiology">Psychophysiology</a></li> <li><a href="/wiki/Quantitative_psychology" title="Quantitative psychology">Quantitative</a></li> <li><a href="/wiki/Social_psychology" title="Social psychology">Social</a></li> <li><a href="/wiki/Theoretical_psychology" title="Theoretical psychology">Theoretical</a></li></ul> </div></td><td class="noviewer navbox-image" rowspan="6" style="width:1px;padding:0 0 0 2px"><div><span typeof="mw:File"><a href="/wiki/File:Psi-stylized.svg" class="mw-file-description"><img alt="stylized letter psi" src="//upload.wikimedia.org/wikipedia/commons/thumb/e/e4/Psi-stylized.svg/60px-Psi-stylized.svg.png" decoding="async" width="50" height="50" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/e/e4/Psi-stylized.svg/120px-Psi-stylized.svg.png 1.5x" data-file-width="123" data-file-height="124" 
/></a></span></div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%;background:#CFE1FF;"><a href="/wiki/Applied_psychology" title="Applied psychology">Applied <br />psychology</a></th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Anomalistic_psychology" title="Anomalistic psychology">Anomalistic</a></li> <li><a href="/wiki/Applied_behavior_analysis" title="Applied behavior analysis">Applied behavior analysis</a></li> <li><a href="/wiki/Psychological_testing" title="Psychological testing">Assessment</a></li> <li><a href="/wiki/Clinical_psychology" title="Clinical psychology">Clinical</a></li> <li><a href="/wiki/Coaching_psychology" title="Coaching psychology">Coaching</a></li> <li><a href="/wiki/Community_psychology" title="Community psychology">Community</a></li> <li><a href="/wiki/Consumer_behaviour" title="Consumer behaviour">Consumer</a></li> <li><a href="/wiki/Counseling_psychology" title="Counseling psychology">Counseling</a></li> <li><a href="/wiki/Critical_psychology" title="Critical psychology">Critical</a></li> <li><a href="/wiki/Educational_psychology" title="Educational psychology">Educational</a></li> <li><a href="/wiki/Ergonomics" title="Ergonomics">Ergonomics</a></li> <li><a href="/wiki/Feminist_psychology" title="Feminist psychology">Feminist</a></li> <li><a href="/wiki/Forensic_psychology" title="Forensic psychology">Forensic</a></li> <li><a href="/wiki/Health_psychology" title="Health psychology">Health</a></li> <li><a href="/wiki/Humanistic_psychology" title="Humanistic psychology">Humanistic</a></li> <li><a href="/wiki/Industrial_and_organizational_psychology" title="Industrial and organizational psychology">Industrial and organizational</a></li> <li><a href="/wiki/Legal_psychology" title="Legal psychology">Legal</a></li> <li><a href="/wiki/Media_psychology" title="Media psychology">Media</a></li> <li><a 
href="/wiki/Medical_psychology" title="Medical psychology">Medical</a></li> <li><a href="/wiki/Military_psychology" title="Military psychology">Military</a></li> <li><a href="/wiki/Music_psychology" class="mw-redirect" title="Music psychology">Music</a></li> <li><a href="/wiki/Occupational_health_psychology" title="Occupational health psychology">Occupational health</a></li> <li><a href="/wiki/Pastoral_psychology" class="mw-redirect" title="Pastoral psychology">Pastoral</a></li> <li><a href="/wiki/Political_psychology" title="Political psychology">Political</a></li> <li><a href="/wiki/Positive_psychology" title="Positive psychology">Positive</a></li> <li><a href="/wiki/Psychometrics" title="Psychometrics">Psychometrics</a></li> <li><a href="/wiki/Psychotherapy" title="Psychotherapy">Psychotherapy</a></li> <li><a href="/wiki/Psychology_of_religion" title="Psychology of religion">Religion</a></li> <li><a href="/wiki/School_psychology" title="School psychology">School</a></li> <li><a href="/wiki/Sport_psychology" title="Sport psychology">Sport and exercise</a></li> <li><a href="/wiki/Suicidology" title="Suicidology">Suicidology</a></li> <li><a href="/wiki/Systems_psychology" title="Systems psychology">Systems</a></li> <li><a href="/wiki/Traffic_psychology" title="Traffic psychology">Traffic</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%;background:#CFE1FF;"><a href="/wiki/List_of_psychological_research_methods" title="List of psychological research methods">Methodologies</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Animal_testing" title="Animal testing">Animal testing</a></li> <li><a href="/wiki/Archival_research" title="Archival research">Archival research</a></li> <li><a href="/wiki/Behavioral_epigenetics" title="Behavioral epigenetics">Behavior epigenetics</a></li> <li><a href="/wiki/Case_study" title="Case study">Case 
study</a></li> <li><a href="/wiki/Content_analysis" title="Content analysis">Content analysis</a></li> <li><a href="/wiki/Experimental_psychology" title="Experimental psychology">Experiments</a></li> <li><a href="/wiki/Human_subject_research" title="Human subject research">Human subject research</a></li> <li><a href="/wiki/Interview_(research)" title="Interview (research)">Interviews</a></li> <li><a href="/wiki/Neuroimaging" title="Neuroimaging">Neuroimaging</a></li> <li><a href="/wiki/Observation" title="Observation">Observation</a></li> <li><a href="/wiki/Psychophysics" title="Psychophysics">Psychophysics</a></li> <li><a href="/wiki/Qualitative_psychological_research" title="Qualitative psychological research">Qualitative research</a></li> <li><a href="/wiki/Quantitative_psychological_research" title="Quantitative psychological research">Quantitative research</a></li> <li><a href="/wiki/Self-report_inventory" title="Self-report inventory">Self-report inventory</a></li> <li><a href="/wiki/Survey_methodology" title="Survey methodology">Statistical surveys</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%;background:#CFE1FF;">Concepts</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"><div class="excerpt-block"><div class="excerpt"> <ul><li><a href="/wiki/Behavior" title="Behavior">Behavior</a></li> <li><a href="/wiki/Applied_behavior_analysis" title="Applied behavior analysis">Behavioral engineering</a></li> <li><a href="/wiki/Behavioural_genetics" title="Behavioural genetics">Behavioral genetics</a></li> <li><a href="/wiki/Behavioral_neuroscience" title="Behavioral neuroscience">Behavioral neuroscience</a></li> <li><a href="/wiki/Cognition" title="Cognition">Cognition</a></li> <li><a href="/wiki/Competence_(polyseme)" title="Competence (polyseme)">Competence</a></li> <li><a href="/wiki/Consciousness" title="Consciousness">Consciousness</a></li> <li><a 
href="/wiki/Consumer_behaviour" title="Consumer behaviour">Consumer behavior</a></li> <li><a href="/wiki/Emotion" title="Emotion">Emotions</a></li> <li><a href="/wiki/Feeling" title="Feeling">Feelings</a></li> <li><a href="/wiki/Ergonomics" title="Ergonomics">Human factors and ergonomics</a></li> <li><a href="/wiki/Intelligence" title="Intelligence">Intelligence</a></li> <li><a href="/wiki/Maslow%27s_hierarchy_of_needs" title="Maslow&#39;s hierarchy of needs">Maslow's hierarchy of needs</a></li> <li><a href="/wiki/Mental_state" title="Mental state">Mental state</a></li> <li><a href="/wiki/Mind" title="Mind">Mind</a></li> <li><a href="/wiki/Psychology_of_religion" title="Psychology of religion">Psychology of religion</a></li> <li><a href="/wiki/Psychometrics" title="Psychometrics">Psychometrics</a></li> <li><a href="/wiki/Sex_differences_in_psychology" title="Sex differences in psychology">Sex differences</a></li> <li><a href="/wiki/Terror_management_theory" title="Terror management theory">Terror management theory</a></li></ul></div></div></div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%;background:#CFE1FF;"><div style="display: inline-block; line-height: 1.2em; padding: .1em 0;"><a href="/wiki/List_of_psychologists" title="List of psychologists">Psychologists</a></div></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Wilhelm_Wundt" title="Wilhelm Wundt">Wilhelm Wundt</a></li> <li><a href="/wiki/William_James" title="William James">William James</a></li> <li><a href="/wiki/Ivan_Pavlov" title="Ivan Pavlov">Ivan Pavlov</a></li> <li><a href="/wiki/Sigmund_Freud" title="Sigmund Freud">Sigmund Freud</a></li> <li><a href="/wiki/Edward_Thorndike" title="Edward Thorndike">Edward Thorndike</a></li> <li><a href="/wiki/Carl_Jung" title="Carl Jung">Carl Jung</a></li> <li><a href="/wiki/John_B._Watson" title="John B. Watson">John B. 
Watson</a></li> <li><a href="/wiki/Clark_L._Hull" title="Clark L. Hull">Clark L. Hull</a></li> <li><a href="/wiki/Kurt_Lewin" title="Kurt Lewin">Kurt Lewin</a></li> <li><a href="/wiki/Jean_Piaget" title="Jean Piaget">Jean Piaget</a></li> <li><a href="/wiki/Gordon_Allport" title="Gordon Allport">Gordon Allport</a></li> <li><a href="/wiki/J._P._Guilford" title="J. P. Guilford">J. P. Guilford</a></li> <li><a href="/wiki/Carl_Rogers" title="Carl Rogers">Carl Rogers</a></li> <li><a href="/wiki/Erik_Erikson" title="Erik Erikson">Erik Erikson</a></li> <li><a href="/wiki/B._F._Skinner" title="B. F. Skinner">B. F. Skinner</a></li> <li><a href="/wiki/Donald_O._Hebb" title="Donald O. Hebb">Donald O. Hebb</a></li> <li><a href="/wiki/Ernest_Hilgard" title="Ernest Hilgard">Ernest Hilgard</a></li> <li><a href="/wiki/Harry_Harlow" title="Harry Harlow">Harry Harlow</a></li> <li><a href="/wiki/Raymond_Cattell" title="Raymond Cattell">Raymond Cattell</a></li> <li><a href="/wiki/Abraham_Maslow" title="Abraham Maslow">Abraham Maslow</a></li> <li><a href="/wiki/Neal_E._Miller" title="Neal E. Miller">Neal E. Miller</a></li> <li><a href="/wiki/Jerome_Bruner" title="Jerome Bruner">Jerome Bruner</a></li> <li><a href="/wiki/Donald_T._Campbell" title="Donald T. Campbell">Donald T. Campbell</a></li> <li><a href="/wiki/Hans_Eysenck" title="Hans Eysenck">Hans Eysenck</a></li> <li><a href="/wiki/Herbert_A._Simon" title="Herbert A. Simon">Herbert A. Simon</a></li> <li><a href="/wiki/David_McClelland" title="David McClelland">David McClelland</a></li> <li><a href="/wiki/Leon_Festinger" title="Leon Festinger">Leon Festinger</a></li> <li><a href="/wiki/George_Armitage_Miller" title="George Armitage Miller">George A. 
Miller</a></li> <li><a href="/wiki/Richard_Lazarus" title="Richard Lazarus">Richard Lazarus</a></li> <li><a href="/wiki/Stanley_Schachter" title="Stanley Schachter">Stanley Schachter</a></li> <li><a href="/wiki/Robert_Zajonc" title="Robert Zajonc">Robert Zajonc</a></li> <li><a href="/wiki/Albert_Bandura" title="Albert Bandura">Albert Bandura</a></li> <li><a href="/wiki/Roger_Brown_(psychologist)" title="Roger Brown (psychologist)">Roger Brown</a></li> <li><a href="/wiki/Endel_Tulving" title="Endel Tulving">Endel Tulving</a></li> <li><a href="/wiki/Lawrence_Kohlberg" title="Lawrence Kohlberg">Lawrence Kohlberg</a></li> <li><a href="/wiki/Noam_Chomsky" title="Noam Chomsky">Noam Chomsky</a></li> <li><a href="/wiki/Ulric_Neisser" title="Ulric Neisser">Ulric Neisser</a></li> <li><a href="/wiki/Jerome_Kagan" title="Jerome Kagan">Jerome Kagan</a></li> <li><a href="/wiki/Walter_Mischel" title="Walter Mischel">Walter Mischel</a></li> <li><a href="/wiki/Elliot_Aronson" title="Elliot Aronson">Elliot Aronson</a></li> <li><a href="/wiki/Daniel_Kahneman" title="Daniel Kahneman">Daniel Kahneman</a></li> <li><a href="/wiki/Paul_Ekman" title="Paul Ekman">Paul Ekman</a></li> <li><a href="/wiki/Michael_Posner_(psychologist)" title="Michael Posner (psychologist)">Michael Posner</a></li> <li><a href="/wiki/Amos_Tversky" title="Amos Tversky">Amos Tversky</a></li> <li><a href="/wiki/Bruce_McEwen" title="Bruce McEwen">Bruce McEwen</a></li> <li><a href="/wiki/Larry_Squire" title="Larry Squire">Larry Squire</a></li> <li><a href="/wiki/Richard_E._Nisbett" title="Richard E. Nisbett">Richard E. Nisbett</a></li> <li><a href="/wiki/Martin_Seligman" title="Martin Seligman">Martin Seligman</a></li> <li><a href="/wiki/Ed_Diener" title="Ed Diener">Ed Diener</a></li> <li><a href="/wiki/Shelley_E._Taylor" title="Shelley E. Taylor">Shelley E. 
Taylor</a></li> <li><a href="/wiki/John_Robert_Anderson_(psychologist)" title="John Robert Anderson (psychologist)">John Anderson</a></li> <li><a href="/wiki/Ronald_C._Kessler" title="Ronald C. Kessler">Ronald C. Kessler</a></li> <li><a href="/wiki/Joseph_E._LeDoux" title="Joseph E. LeDoux">Joseph E. LeDoux</a></li> <li><a href="/wiki/Richard_Davidson" title="Richard Davidson">Richard Davidson</a></li> <li><a href="/wiki/Susan_Fiske" title="Susan Fiske">Susan Fiske</a></li> <li><a href="/wiki/Roy_Baumeister" title="Roy Baumeister">Roy Baumeister</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%;background:#CFE1FF;"><a href="/wiki/Category:Psychology_lists" title="Category:Psychology lists">Lists</a></th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Outline_of_counseling" title="Outline of counseling">Counseling topics</a></li> <li><a href="/wiki/List_of_branches_of_psychology" title="List of branches of psychology">Disciplines</a></li> <li><a href="/wiki/List_of_psychology_organizations" title="List of psychology organizations">Organizations</a></li> <li><a href="/wiki/Outline_of_psychology" title="Outline of psychology">Outline</a></li> <li><a href="/wiki/List_of_psychologists" title="List of psychologists">Psychologists</a></li> <li><a href="/wiki/List_of_psychotherapies" title="List of psychotherapies">Psychotherapies</a></li> <li><a href="/wiki/List_of_psychological_research_methods" title="List of psychological research methods">Research methods</a></li> <li><a href="/wiki/List_of_psychological_schools" title="List of psychological schools">Schools of thought</a></li> <li><a href="/wiki/Timeline_of_psychology" title="Timeline of psychology">Timeline</a></li> <li><a href="/wiki/Index_of_psychology_articles" title="Index of psychology articles">Topics</a></li></ul> </div></td></tr><tr><td class="navbox-abovebelow" colspan="3" 
style="background:#DFEBFF;"><div> <ul><li><span class="noviewer" typeof="mw:File"><span title="Category"><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/16px-Symbol_category_class.svg.png" decoding="async" width="16" height="16" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/23px-Symbol_category_class.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/31px-Symbol_category_class.svg.png 2x" data-file-width="180" data-file-height="185" /></span></span> <a href="/wiki/Category:Psychology" title="Category:Psychology">Category</a></li> <li><a href="https://en.wiktionary.org/wiki/psychology" class="extiw" title="wiktionary:psychology">Wiktionary definition</a></li> <li><a href="https://en.wiktionary.org/wiki/Category:en:Psychology" class="extiw" title="wiktionary:Category:en:Psychology">Wiktionary category</a></li> <li><a href="https://en.wikisource.org/wiki/Category:Psychology" class="extiw" title="wikisource:Category:Psychology">Wikisource</a></li> <li><a href="https://commons.wikimedia.org/wiki/category:Psychology" class="extiw" title="commons:category:Psychology">Wikimedia Commons</a></li> <li><a href="https://en.wikiquote.org/wiki/Psychology" class="extiw" title="wikiquote:Psychology">Wikiquote</a></li> <li><a href="https://en.wikinews.org/wiki/Special:Search/Psychology" class="extiw" title="wikinews:Special:Search/Psychology">Wikinews</a></li> <li><a href="https://en.wikibooks.org/wiki/Psychology" class="extiw" title="wikibooks:Psychology">Wikibooks</a></li></ul> </div></td></tr></tbody></table></div> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236075235" /></div><div role="navigation" class="navbox" aria-labelledby="Nonverbal_communication97" style="padding:3px"><table class="nowraplinks 
mw-collapsible autocollapse navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374" /><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239400231" /><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Nonverbal_communication" title="Template:Nonverbal communication"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Nonverbal_communication" title="Template talk:Nonverbal communication"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Nonverbal_communication" title="Special:EditPage/Template:Nonverbal communication"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Nonverbal_communication97" style="font-size:114%;margin:0 4em"><a href="/wiki/Nonverbal_communication" title="Nonverbal communication">Nonverbal communication</a></div></th></tr><tr><td colspan="2" class="navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks mw-collapsible mw-collapsed navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><div id="Modalities97" style="font-size:114%;margin:0 4em"><a href="/wiki/Modality_(semiotics)" title="Modality (semiotics)">Modalities</a></div></th></tr><tr><td colspan="2" class="navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="row" class="navbox-group" style="width:1%">Physical</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Blushing" title="Blushing">Blushing</a></li> <li><a 
href="/wiki/Body_language" title="Body language">Body language</a> / <a href="/wiki/Kinesics" title="Kinesics">Kinesics</a></li> <li><a href="/wiki/Body-to-body_communication" title="Body-to-body communication">Body-to-body communication</a></li> <li><a href="/wiki/Facial_expression" title="Facial expression">Facial expression</a> <ul><li><a href="/wiki/Facial_Action_Coding_System" title="Facial Action Coding System">Facial Action Coding System</a></li> <li><a href="/wiki/Microexpression" title="Microexpression">Microexpression</a></li> <li><a href="/wiki/Subtle_expression" title="Subtle expression">Subtle expression</a></li></ul></li> <li><a href="/wiki/Gesture" title="Gesture">Gesture</a> <ul><li><a href="/wiki/List_of_gestures" title="List of gestures">List</a></li> <li><a href="/wiki/Speech-independent_gestures" class="mw-redirect" title="Speech-independent gestures">Speech-independent gestures</a></li></ul></li> <li><a href="/wiki/Haptic_communication" title="Haptic communication">Haptic communication</a></li> <li><a href="/wiki/Imitation" title="Imitation">Imitation</a></li> <li><a href="/wiki/Synchronization#Human_movement" title="Synchronization">Interpersonal synchrony</a></li> <li><a href="/wiki/Laughter" title="Laughter">Laughter</a></li> <li><a href="/wiki/Oculesics" title="Oculesics">Oculesics</a> <ul><li><a href="/wiki/Eye_contact" title="Eye contact">Eye contact</a></li> <li><a href="/wiki/Pupillary_response" title="Pupillary response">Pupil dilation</a></li></ul></li> <li><a href="/wiki/Olfactic_communication" title="Olfactic communication">Olfaction</a></li> <li><a href="/wiki/Posture_(psychology)" title="Posture (psychology)">Posture</a></li> <li><a href="/wiki/Proxemics" title="Proxemics">Proxemics</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Speech" title="Speech">Speech</a></th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 
0.25em"> <ul><li><a href="/wiki/Affect_(linguistics)" title="Affect (linguistics)">Affect</a></li> <li><a href="/wiki/Emotional_prosody" title="Emotional prosody">Emotional prosody</a></li> <li><a href="/wiki/Paralanguage" title="Paralanguage">Paralanguage</a> <ul><li><a href="/wiki/Intonation_(linguistics)" title="Intonation (linguistics)">Intonation</a></li> <li><a href="/wiki/Loudness" title="Loudness">Loudness</a></li> <li><a href="/wiki/Prosody_(linguistics)" title="Prosody (linguistics)">Prosody</a></li> <li><a href="/wiki/Rhythm" title="Rhythm">Rhythm</a></li> <li><a href="/wiki/Stress_(linguistics)" title="Stress (linguistics)">Stress</a></li> <li><a href="/wiki/Tone_(linguistics)" title="Tone (linguistics)">Tone</a></li> <li><a href="/wiki/Phonation" title="Phonation">Voice quality</a></li></ul></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Social_environment" title="Social environment">Social context</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Chronemics" title="Chronemics">Chronemics</a></li> <li><a href="/wiki/Convention_(norm)" title="Convention (norm)">Conventions</a></li> <li><a href="/wiki/Display_rules" title="Display rules">Display rules</a></li> <li><a href="/wiki/Habitus_(sociology)" title="Habitus (sociology)">Habitus</a></li> <li><a href="/wiki/High-context_and_low-context_cultures" title="High-context and low-context cultures">High-context and low-context cultures</a></li> <li><a href="/wiki/Interpersonal_relationship" title="Interpersonal relationship">Interpersonal relationship</a></li> <li><a href="/wiki/Social_norm" title="Social norm">Social norm</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Other</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a 
href="/wiki/Emoticon" title="Emoticon">Emoticon</a> / <a href="/wiki/Smiley" title="Smiley">Smiley</a></li> <li><a href="/wiki/One-bit_message" title="One-bit message">One-bit message</a> <ul><li><a href="/wiki/Missed_call" title="Missed call">Missed call</a></li></ul></li> <li><a href="/wiki/Silent_service_code" title="Silent service code">Silent service code</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Unconscious_communication" title="Unconscious communication">Unconscious</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Microexpression" title="Microexpression">Microexpression</a></li> <li><a href="/wiki/Non-verbal_leakage" title="Non-verbal leakage">Non-verbal leakage</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Multi-faceted</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Affect_display" title="Affect display">Affect display</a></li> <li><a href="/wiki/Deception" title="Deception">Deception</a></li> <li><a class="mw-selflink selflink">Emotion recognition</a></li> <li><a href="/wiki/First_impression_(psychology)" title="First impression (psychology)">First impression</a></li> <li><a href="/wiki/Intimate_relationship" title="Intimate relationship">Intimacy</a></li></ul> </div></td></tr></tbody></table><div></div></td></tr></tbody></table><div></div></td></tr><tr><td colspan="2" class="navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks mw-collapsible mw-collapsed navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><div id="Broader_concepts97" style="font-size:114%;margin:0 4em">Broader concepts</div></th></tr><tr><td colspan="2" class="navbox-list 
navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Basic_interpersonal_communicative_skills" class="mw-redirect" title="Basic interpersonal communicative skills">Basic interpersonal communicative skills</a></li> <li><a href="/wiki/Communication" title="Communication">Communication</a></li> <li><a href="/wiki/Emotional_intelligence" title="Emotional intelligence">Emotional intelligence</a></li> <li><a href="/wiki/Nunchi" title="Nunchi">Nunchi</a></li> <li><a href="/wiki/People_skills" title="People skills">People skills</a></li> <li><a href="/wiki/Semiotics" title="Semiotics">Semiotics</a></li> <li><a href="/wiki/Social_behavior" title="Social behavior">Social behavior</a></li> <li><a href="/wiki/Social_cue" title="Social cue">Social cue</a></li> <li><a href="/wiki/Social_competence" title="Social competence">Social competence</a></li> <li><a href="/wiki/Social_skills" title="Social skills">Social skills</a></li> <li><a href="/wiki/Unsaid" title="Unsaid">Unsaid</a></li></ul> </div></td></tr></tbody></table><div></div></td></tr><tr><td colspan="2" class="navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks mw-collapsible mw-collapsed navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><div id="Further_information97" style="font-size:114%;margin:0 4em">Further information</div></th></tr><tr><td colspan="2" class="navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="row" class="navbox-group" style="width:1%">Disorders</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Aprosodia" title="Aprosodia">Aprosodia</a> <ul><li><a href="/wiki/Asperger_syndrome" title="Asperger syndrome">Asperger 
syndrome</a></li> <li><a href="/wiki/Autism" title="Autism">Autism</a></li> <li><a href="/wiki/Fragile_X_syndrome" title="Fragile X syndrome">Fragile X</a></li> <li><a href="/wiki/Pervasive_developmental_disorder_not_otherwise_specified" title="Pervasive developmental disorder not otherwise specified">Pervasive developmental disorder not otherwise specified</a></li> <li><a href="/wiki/Childhood_disintegrative_disorder" title="Childhood disintegrative disorder">Childhood disintegrative disorder</a></li> <li><a href="/wiki/Rett_syndrome" title="Rett syndrome">Rett syndrome</a></li></ul></li> <li><a href="/wiki/Dyssemia" title="Dyssemia">Dyssemia</a></li> <li><a href="/wiki/Nonverbal_learning_disorder" title="Nonverbal learning disorder">Nonverbal learning disorder</a></li> <li><a href="/wiki/Social_(pragmatic)_communication_disorder" title="Social (pragmatic) communication disorder">Social (pragmatic) communication disorder</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Neuroanatomy" title="Neuroanatomy">Neuroanatomy</a></th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Limbic_system" title="Limbic system">Limbic system</a> / <a href="/wiki/Limbic_lobe" title="Limbic lobe">Limbic lobe</a></li> <li><a href="/wiki/Mirror_neuron" title="Mirror neuron">Mirror neuron</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Applications</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Cold_reading" title="Cold reading">Cold reading</a></li> <li><a href="/wiki/Lie_detection" title="Lie detection">Lie detection</a></li> <li><a href="/wiki/Freudian_slip" title="Freudian slip">Freudian slip</a></li> <li><a href="/wiki/Tell_(poker)" title="Tell (poker)">Poker tell</a></li> <li><a 
href="/wiki/Attention_(advertising)" class="mw-redirect" title="Attention (advertising)">Targeted advertising</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Technology</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Computer_processing_of_body_language" title="Computer processing of body language">Computer processing of body language</a></li> <li><a href="/wiki/Emotion_recognition_in_conversation" title="Emotion recognition in conversation">Emotion recognition in conversation</a></li> <li><a href="/wiki/Gesture_recognition" title="Gesture recognition">Gesture recognition</a></li> <li><a href="/wiki/List_of_facial_expression_databases" title="List of facial expression databases">List of facial expression databases</a></li> <li><a href="/wiki/Sentiment_analysis" title="Sentiment analysis">Sentiment analysis</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Key people</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Ray_Birdwhistell" title="Ray Birdwhistell">Ray Birdwhistell</a></li> <li><a href="/wiki/Charles_Darwin" title="Charles Darwin">Charles Darwin</a></li> <li><a href="/wiki/Paul_Ekman" title="Paul Ekman">Paul Ekman</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Related</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks navbox-subgroup" style="border-spacing:0"><tbody><tr><td colspan="2" class="navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Animal_communication" title="Animal communication">Animal communication</a></li> <li><a href="/wiki/Behavioral_communication" title="Behavioral 
communication">Behavioral communication</a> <ul><li><a href="/wiki/Aggression" title="Aggression">Aggressive</a></li> <li><a href="/wiki/Assertiveness" title="Assertiveness">Assertive</a></li> <li><a href="/wiki/Deference" title="Deference">Passive</a></li> <li><a href="/wiki/Passive-aggressive_behavior" title="Passive-aggressive behavior">Passive-aggressive</a></li></ul></li> <li><a href="/wiki/Impression_management" title="Impression management">Impression management</a></li> <li><a href="/wiki/Meta-communication" title="Meta-communication">Meta-communication</a></li> <li><a href="/wiki/Monastic_sign_languages" title="Monastic sign languages">Monastic sign lexicons</a></li> <li><a href="/wiki/Linguistics" title="Linguistics">Verbal communication</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Non-verbal language</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Sign_language" title="Sign language">Sign language</a></li> <li><a href="/wiki/Tactile_signing" title="Tactile signing">Tactile signing</a></li> <li><a href="/wiki/Tadoma" title="Tadoma">Tadoma</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Art and literature</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Mime_artist" title="Mime artist">Mime</a></li> <li><a href="/wiki/Mimoplastic_art" title="Mimoplastic art">Mimoplastic art</a></li> <li><a href="/wiki/Subtext" title="Subtext">Subtext</a></li></ul> </div></td></tr></tbody></table><div></div></td></tr></tbody></table><div></div></td></tr></tbody></table><div></div></td></tr></tbody></table></div> <!-- NewPP limit report Parsed by mw‐web.eqiad.main‐5b7d47cbb‐phtlz Cached time: 20250325003457 Cache expiry: 2592000 Reduced expiry: false Complications: [vary‐revision‐sha1, show‐toc] CPU 
time usage: 1.176 seconds Real time usage: 1.387 seconds Preprocessor visited node count: 4072/1000000 Post‐expand include size: 223098/2097152 bytes Template argument size: 2853/2097152 bytes Highest expansion depth: 12/100 Expensive parser function count: 6/500 Unstrip recursion depth: 1/20 Unstrip post‐expand size: 200832/5000000 bytes Lua time usage: 0.759/10.000 seconds Lua memory usage: 19602974/52428800 bytes Number of Wikibase entities loaded: 0/400 --> <!-- Transclusion expansion time report (%,ms,calls,template) 100.00% 1132.319 1 -total 41.14% 465.826 1 Template:Reflist 25.66% 290.522 5 Template:Navbox 19.59% 221.875 1 Template:Emotion-footer 18.01% 203.978 11 Template:Cite_book 13.78% 156.067 20 Template:Cite_journal 13.37% 151.394 7 Template:Lang 9.69% 109.683 1 Template:Artificial_intelligence 9.42% 106.698 1 Template:Sidebar_with_collapsible_lists 8.39% 94.954 1 Template:Short_description --> <!-- Saved in parser cache with key enwiki:pcache:48198256:|#|:idhash:canonical and timestamp 20250325003457 and revision id 1277684962. 
Rendering was triggered because: page-view --> </div><!--esi <esi:include src="/esitest-fa8a495983347898/content" /> --><noscript><img src="https://login.wikimedia.org/wiki/Special:CentralAutoLogin/start?useformat=desktop&amp;type=1x1&amp;usesul3=0" alt="" width="1" height="1" style="border: none; position: absolute;"></noscript> <div class="printfooter" data-nosnippet="">Retrieved from "<a dir="ltr" href="https://en.wikipedia.org/w/index.php?title=Emotion_recognition&amp;oldid=1277684962">https://en.wikipedia.org/w/index.php?title=Emotion_recognition&amp;oldid=1277684962</a>"</div></div> <div id="catlinks" class="catlinks" data-mw="interface"><div id="mw-normal-catlinks" class="mw-normal-catlinks"><a href="/wiki/Help:Category" title="Help:Category">Categories</a>: <ul><li><a href="/wiki/Category:Emotion" title="Category:Emotion">Emotion</a></li><li><a href="/wiki/Category:Applications_of_artificial_intelligence" title="Category:Applications of artificial intelligence">Applications of artificial intelligence</a></li><li><a href="/wiki/Category:Affective_computing" title="Category:Affective computing">Affective computing</a></li></ul></div><div id="mw-hidden-catlinks" class="mw-hidden-catlinks mw-hidden-cats-hidden">Hidden categories: <ul><li><a href="/wiki/Category:CS1:_long_volume_value" title="Category:CS1: long volume value">CS1: long volume value</a></li><li><a href="/wiki/Category:Articles_with_short_description" title="Category:Articles with short description">Articles with short description</a></li><li><a href="/wiki/Category:Short_description_is_different_from_Wikidata" title="Category:Short description is different from Wikidata">Short description is different from Wikidata</a></li><li><a href="/wiki/Category:Use_dmy_dates_from_August_2016" title="Category:Use dmy dates from August 2016">Use dmy dates from August 2016</a></li><li><a href="/wiki/Category:All_articles_with_unsourced_statements" title="Category:All articles with unsourced statements">All 
articles with unsourced statements</a></li><li><a href="/wiki/Category:Articles_with_unsourced_statements_from_September_2019" title="Category:Articles with unsourced statements from September 2019">Articles with unsourced statements from September 2019</a></li><li><a href="/wiki/Category:Webarchive_template_wayback_links" title="Category:Webarchive template wayback links">Webarchive template wayback links</a></li><li><a href="/wiki/Category:Articles_with_unsourced_statements_from_February_2020" title="Category:Articles with unsourced statements from February 2020">Articles with unsourced statements from February 2020</a></li><li><a href="/wiki/Category:Articles_containing_Welsh-language_text" title="Category:Articles containing Welsh-language text">Articles containing Welsh-language text</a></li><li><a href="/wiki/Category:Articles_containing_Danish-language_text" title="Category:Articles containing Danish-language text">Articles containing Danish-language text</a></li><li><a href="/wiki/Category:Articles_containing_Japanese-language_text" title="Category:Articles containing Japanese-language text">Articles containing Japanese-language text</a></li><li><a href="/wiki/Category:Articles_containing_Portuguese-language_text" title="Category:Articles containing Portuguese-language text">Articles containing Portuguese-language text</a></li><li><a href="/wiki/Category:Articles_containing_German-language_text" title="Category:Articles containing German-language text">Articles containing German-language text</a></li><li><a href="/wiki/Category:Articles_with_excerpts" title="Category:Articles with excerpts">Articles with excerpts</a></li></ul></div></div> </div> </main> </div> <div class="mw-footer-container"> <footer id="footer" class="mw-footer" > <ul id="footer-info"> <li id="footer-info-lastmod"> This page was last edited on 26 February 2025, at 03:02<span class="anonymous-show">&#160;(UTC)</span>.</li> <li id="footer-info-copyright">Text is available under the <a 
href="/wiki/Wikipedia:Text_of_the_Creative_Commons_Attribution-ShareAlike_4.0_International_License" title="Wikipedia:Text of the Creative Commons Attribution-ShareAlike 4.0 International License">Creative Commons Attribution-ShareAlike 4.0 License</a>; additional terms may apply. By using this site, you agree to the <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Terms_of_Use" class="extiw" title="foundation:Special:MyLanguage/Policy:Terms of Use">Terms of Use</a> and <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy" class="extiw" title="foundation:Special:MyLanguage/Policy:Privacy policy">Privacy Policy</a>. Wikipedia® is a registered trademark of the <a rel="nofollow" class="external text" href="https://wikimediafoundation.org/">Wikimedia Foundation, Inc.</a>, a non-profit organization.</li> </ul> <ul id="footer-places"> <li id="footer-places-privacy"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy">Privacy policy</a></li> <li id="footer-places-about"><a href="/wiki/Wikipedia:About">About Wikipedia</a></li> <li id="footer-places-disclaimers"><a href="/wiki/Wikipedia:General_disclaimer">Disclaimers</a></li> <li id="footer-places-contact"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us">Contact Wikipedia</a></li> <li id="footer-places-wm-codeofconduct"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Universal_Code_of_Conduct">Code of Conduct</a></li> <li id="footer-places-developers"><a href="https://developer.wikimedia.org">Developers</a></li> <li id="footer-places-statslink"><a href="https://stats.wikimedia.org/#/en.wikipedia.org">Statistics</a></li> <li id="footer-places-cookiestatement"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Cookie_statement">Cookie statement</a></li> <li id="footer-places-mobileview"><a 
href="//en.m.wikipedia.org/w/index.php?title=Emotion_recognition&amp;mobileaction=toggle_view_mobile" class="noprint stopMobileRedirectToggle">Mobile view</a></li> </ul> <ul id="footer-icons" class="noprint"> <li id="footer-copyrightico"><a href="https://www.wikimedia.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><picture><source media="(min-width: 500px)" srcset="/static/images/footer/wikimedia-button.svg" width="84" height="29"><img src="/static/images/footer/wikimedia.svg" width="25" height="25" alt="Wikimedia Foundation" lang="en" loading="lazy"></picture></a></li> <li id="footer-poweredbyico"><a href="https://www.mediawiki.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><picture><source media="(min-width: 500px)" srcset="/w/resources/assets/poweredby_mediawiki.svg" width="88" height="31"><img src="/w/resources/assets/mediawiki_compact.svg" alt="Powered by MediaWiki" lang="en" width="25" height="25" loading="lazy"></picture></a></li> </ul> </footer> </div> </div> </div> <div class="vector-header-container vector-sticky-header-container"> <div id="vector-sticky-header" class="vector-sticky-header"> <div class="vector-sticky-header-start"> <div class="vector-sticky-header-icon-start vector-button-flush-left vector-button-flush-right" aria-hidden="true"> <button class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-sticky-header-search-toggle" tabindex="-1" data-event-name="ui.vector-sticky-search-form.icon"><span class="vector-icon mw-ui-icon-search mw-ui-icon-wikimedia-search"></span> <span>Search</span> </button> </div> <div role="search" class="vector-search-box-vue vector-search-box-show-thumbnail vector-search-box"> <div class="vector-typeahead-search-container"> <div class="cdx-typeahead-search cdx-typeahead-search--show-thumbnail"> <form action="/w/index.php" id="vector-sticky-search-form" class="cdx-search-input 
cdx-search-input--has-end-button"> <div class="cdx-search-input__input-wrapper" data-search-loc="header-moved"> <div class="cdx-text-input cdx-text-input--has-start-icon"> <input class="cdx-text-input__input" type="search" name="search" placeholder="Search Wikipedia"> <span class="cdx-text-input__icon cdx-text-input__start-icon"></span> </div> <input type="hidden" name="title" value="Special:Search"> </div> <button class="cdx-button cdx-search-input__end-button">Search</button> </form> </div> </div> </div> <div class="vector-sticky-header-context-bar"> <nav aria-label="Contents" class="vector-toc-landmark"> <div id="vector-sticky-header-toc" class="vector-dropdown mw-portlet mw-portlet-sticky-header-toc vector-sticky-header-toc vector-button-flush-left" > <input type="checkbox" id="vector-sticky-header-toc-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-sticky-header-toc" class="vector-dropdown-checkbox " aria-label="Toggle the table of contents" > <label id="vector-sticky-header-toc-label" for="vector-sticky-header-toc-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-listBullet mw-ui-icon-wikimedia-listBullet"></span> <span class="vector-dropdown-label-text">Toggle the table of contents</span> </label> <div class="vector-dropdown-content"> <div id="vector-sticky-header-toc-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <div class="vector-sticky-header-context-bar-primary" aria-hidden="true" ><span class="mw-page-title-main">Emotion recognition</span></div> </div> </div> <div class="vector-sticky-header-end" aria-hidden="true"> <div class="vector-sticky-header-icons"> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only" id="ca-talk-sticky-header" 
tabindex="-1" data-event-name="talk-sticky-header"><span class="vector-icon mw-ui-icon-speechBubbles mw-ui-icon-wikimedia-speechBubbles"></span> <span></span> </a> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only" id="ca-subject-sticky-header" tabindex="-1" data-event-name="subject-sticky-header"><span class="vector-icon mw-ui-icon-article mw-ui-icon-wikimedia-article"></span> <span></span> </a> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only" id="ca-history-sticky-header" tabindex="-1" data-event-name="history-sticky-header"><span class="vector-icon mw-ui-icon-wikimedia-history mw-ui-icon-wikimedia-wikimedia-history"></span> <span></span> </a> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only mw-watchlink" id="ca-watchstar-sticky-header" tabindex="-1" data-event-name="watch-sticky-header"><span class="vector-icon mw-ui-icon-wikimedia-star mw-ui-icon-wikimedia-wikimedia-star"></span> <span></span> </a> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only" id="ca-edit-sticky-header" tabindex="-1" data-event-name="wikitext-edit-sticky-header"><span class="vector-icon mw-ui-icon-wikimedia-wikiText mw-ui-icon-wikimedia-wikimedia-wikiText"></span> <span></span> </a> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only" id="ca-ve-edit-sticky-header" tabindex="-1" data-event-name="ve-edit-sticky-header"><span class="vector-icon mw-ui-icon-wikimedia-edit mw-ui-icon-wikimedia-wikimedia-edit"></span> <span></span> </a> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only" 
id="ca-viewsource-sticky-header" tabindex="-1" data-event-name="ve-edit-protected-sticky-header"><span class="vector-icon mw-ui-icon-wikimedia-editLock mw-ui-icon-wikimedia-wikimedia-editLock"></span> <span></span> </a> </div> <div class="vector-sticky-header-buttons"> <button class="cdx-button cdx-button--weight-quiet mw-interlanguage-selector" id="p-lang-btn-sticky-header" tabindex="-1" data-event-name="ui.dropdown-p-lang-btn-sticky-header"><span class="vector-icon mw-ui-icon-wikimedia-language mw-ui-icon-wikimedia-wikimedia-language"></span> <span>6 languages</span> </button> <a href="#" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--action-progressive" id="ca-addsection-sticky-header" tabindex="-1" data-event-name="addsection-sticky-header"><span class="vector-icon mw-ui-icon-speechBubbleAdd-progressive mw-ui-icon-wikimedia-speechBubbleAdd-progressive"></span> <span>Add topic</span> </a> </div> <div class="vector-sticky-header-icon-end"> <div class="vector-user-links"> </div> </div> </div> </div> </div> <div class="mw-portlet mw-portlet-dock-bottom emptyPortlet" id="p-dock-bottom"> <ul> </ul> </div> <script>(RLQ=window.RLQ||[]).push(function(){mw.config.set({"wgHostname":"mw-web.eqiad.main-5b7d47cbb-8tcrp","wgBackendResponseTime":195,"wgPageParseReport":{"limitreport":{"cputime":"1.176","walltime":"1.387","ppvisitednodes":{"value":4072,"limit":1000000},"postexpandincludesize":{"value":223098,"limit":2097152},"templateargumentsize":{"value":2853,"limit":2097152},"expansiondepth":{"value":12,"limit":100},"expensivefunctioncount":{"value":6,"limit":500},"unstrip-depth":{"value":1,"limit":20},"unstrip-size":{"value":200832,"limit":5000000},"entityaccesscount":{"value":0,"limit":400},"timingprofile":["100.00% 1132.319 1 -total"," 41.14% 465.826 1 Template:Reflist"," 25.66% 290.522 5 Template:Navbox"," 19.59% 221.875 1 Template:Emotion-footer"," 18.01% 203.978 11 Template:Cite_book"," 13.78% 156.067 20 
Template:Cite_journal"," 13.37% 151.394 7 Template:Lang"," 9.69% 109.683 1 Template:Artificial_intelligence"," 9.42% 106.698 1 Template:Sidebar_with_collapsible_lists"," 8.39% 94.954 1 Template:Short_description"]},"scribunto":{"limitreport-timeusage":{"value":"0.759","limit":"10.000"},"limitreport-memusage":{"value":19602974,"limit":52428800}},"cachereport":{"origin":"mw-web.eqiad.main-5b7d47cbb-phtlz","timestamp":"20250325003457","ttl":2592000,"transientcontent":false}}});});</script> <script type="application/ld+json">{"@context":"https:\/\/schema.org","@type":"Article","name":"Emotion recognition","url":"https:\/\/en.wikipedia.org\/wiki\/Emotion_recognition","sameAs":"http:\/\/www.wikidata.org\/entity\/Q1339090","mainEntity":"http:\/\/www.wikidata.org\/entity\/Q1339090","author":{"@type":"Organization","name":"Contributors to Wikimedia projects"},"publisher":{"@type":"Organization","name":"Wikimedia Foundation, Inc.","logo":{"@type":"ImageObject","url":"https:\/\/www.wikimedia.org\/static\/images\/wmf-hor-googpub.png"}},"datePublished":"2015-10-13T00:24:36Z","dateModified":"2025-02-26T03:02:50Z","headline":"process of identifying human emotion"}</script> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10