<!-- Capture artifact: "CINXE.COM" (mirror/CDN banner from the page scrape; not part of the original Wikipedia document). Converted to a comment so no stray text precedes the doctype. -->

<!-- Capture artifact: extracted page title "Deepfake - Wikipedia" (duplicates the <title> element below). Converted to a comment so no stray text precedes the doctype. -->

<!DOCTYPE html> <html class="client-nojs vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-sticky-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-toc-available" lang="en" dir="ltr"> <head> <meta charset="UTF-8"> <title>Deepfake - Wikipedia</title> <script>(function(){var className="client-js vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-sticky-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-toc-available";var cookie=document.cookie.match(/(?:^|; )enwikimwclientpreferences=([^;]+)/);if(cookie){cookie[1].split('%2C').forEach(function(pref){className=className.replace(new RegExp('(^| )'+pref.replace(/-clientpref-\w+$|[^\w-]+/g,'')+'-clientpref-\\w+( |$)'),'$1'+pref+'$2');});}document.documentElement.className=className;}());RLCONF={"wgBreakFrames":false,"wgSeparatorTransformTable":["",""],"wgDigitTransformTable":["",""],"wgDefaultDateFormat":"dmy", 
"wgMonthNames":["","January","February","March","April","May","June","July","August","September","October","November","December"],"wgRequestId":"7c137106-039a-4004-b0a6-81cd8d6d6327","wgCanonicalNamespace":"","wgCanonicalSpecialPageName":false,"wgNamespaceNumber":0,"wgPageName":"Deepfake","wgTitle":"Deepfake","wgCurRevisionId":1258994312,"wgRevisionId":1258994312,"wgArticleId":56641018,"wgIsArticle":true,"wgIsRedirect":false,"wgAction":"view","wgUserName":null,"wgUserGroups":["*"],"wgCategories":["Webarchive template wayback links","CS1 French-language sources (fr)","CS1 German-language sources (de)","CS1 Austrian German-language sources (de-at)","CS1 maint: multiple names: authors list","Articles with short description","Short description is different from Wikidata","Articles that may be too long from November 2024","All articles with unsourced statements","Articles with unsourced statements from June 2024","Articles containing Japanese-language text", "Articles with unsourced statements from September 2023","Articles with limited geographic scope from November 2021","Articles with trivia sections from November 2024","Commons category link from Wikidata","Use British English from April 2023","Use dmy dates from July 2020","Articles containing video clips","Deepfakes","2018 neologisms","Generative artificial intelligence","AI safety","Internet memes introduced in 2020","Internet memes","Media studies","Text-to-image generation","Text-to-video generation"],"wgPageViewLanguage":"en","wgPageContentLanguage":"en","wgPageContentModel":"wikitext","wgRelevantPageName":"Deepfake","wgRelevantArticleId":56641018,"wgIsProbablyEditable":true,"wgRelevantPageIsProbablyEditable":true,"wgRestrictionEdit":[],"wgRestrictionMove":[],"wgNoticeProject":"wikipedia","wgCiteReferencePreviewsActive":false,"wgFlaggedRevsParams":{"tags":{"status":{"levels":1}}},"wgMediaViewerOnClick":true,"wgMediaViewerEnabledByDefault":true,"wgPopupsFlags":0,"wgVisualEditor":{ 
"pageLanguageCode":"en","pageLanguageDir":"ltr","pageVariantFallbacks":"en"},"wgMFDisplayWikibaseDescriptions":{"search":true,"watchlist":true,"tagline":false,"nearby":true},"wgWMESchemaEditAttemptStepOversample":false,"wgWMEPageLength":200000,"wgRelatedArticlesCompat":[],"wgEditSubmitButtonLabelPublish":true,"wgULSPosition":"interlanguage","wgULSisCompactLinksEnabled":false,"wgVector2022LanguageInHeader":true,"wgULSisLanguageSelectorEmpty":false,"wgWikibaseItemId":"Q49473179","wgCheckUserClientHintsHeadersJsApi":["brands","architecture","bitness","fullVersionList","mobile","model","platform","platformVersion"],"GEHomepageSuggestedEditsEnableTopics":true,"wgGETopicsMatchModeEnabled":false,"wgGEStructuredTaskRejectionReasonTextInputEnabled":false,"wgGELevelingUpEnabledForUser":false};RLSTATE={"ext.globalCssJs.user.styles":"ready","site.styles":"ready","user.styles":"ready","ext.globalCssJs.user":"ready","user":"ready","user.options":"loading","ext.cite.styles":"ready", "ext.tmh.player.styles":"ready","skins.vector.search.codex.styles":"ready","skins.vector.styles":"ready","skins.vector.icons":"ready","jquery.makeCollapsible.styles":"ready","ext.wikimediamessages.styles":"ready","ext.visualEditor.desktopArticleTarget.noscript":"ready","ext.uls.interlanguage":"ready","wikibase.client.init":"ready","ext.wikimediaBadges":"ready"};RLPAGEMODULES=["ext.cite.ux-enhancements","mediawiki.page.media","ext.tmh.player","site","mediawiki.page.ready","jquery.makeCollapsible","mediawiki.toc","skins.vector.js","ext.centralNotice.geoIP","ext.centralNotice.startUp","ext.gadget.ReferenceTooltips","ext.gadget.switcher","ext.urlShortener.toolbar","ext.centralauth.centralautologin","mmv.bootstrap","ext.popups","ext.visualEditor.desktopArticleTarget.init","ext.visualEditor.targetLoader","ext.echo.centralauth","ext.eventLogging","ext.wikimediaEvents","ext.navigationTiming","ext.uls.interface","ext.cx.eventlogging.campaigns","ext.cx.uls.quick.actions", 
"wikibase.client.vector-2022","ext.checkUser.clientHints","ext.growthExperiments.SuggestedEditSession","wikibase.sidebar.tracking"];</script> <script>(RLQ=window.RLQ||[]).push(function(){mw.loader.impl(function(){return["user.options@12s5i",function($,jQuery,require,module){mw.user.tokens.set({"patrolToken":"+\\","watchToken":"+\\","csrfToken":"+\\"}); }];});});</script> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=ext.cite.styles%7Cext.tmh.player.styles%7Cext.uls.interlanguage%7Cext.visualEditor.desktopArticleTarget.noscript%7Cext.wikimediaBadges%7Cext.wikimediamessages.styles%7Cjquery.makeCollapsible.styles%7Cskins.vector.icons%2Cstyles%7Cskins.vector.search.codex.styles%7Cwikibase.client.init&amp;only=styles&amp;skin=vector-2022"> <script async="" src="/w/load.php?lang=en&amp;modules=startup&amp;only=scripts&amp;raw=1&amp;skin=vector-2022"></script> <meta name="ResourceLoaderDynamicStyles" content=""> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=site.styles&amp;only=styles&amp;skin=vector-2022"> <meta name="generator" content="MediaWiki 1.44.0-wmf.5"> <meta name="referrer" content="origin"> <meta name="referrer" content="origin-when-cross-origin"> <meta name="robots" content="max-image-preview:standard"> <meta name="format-detection" content="telephone=no"> <meta property="og:image" content="https://upload.wikimedia.org/wikipedia/commons/thumb/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/1200px--Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.jpg"> <meta property="og:image:width" content="1200"> <meta property="og:image:height" content="675"> <meta property="og:image" content="https://upload.wikimedia.org/wikipedia/commons/thumb/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/800px--Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.jpg"> <meta property="og:image:width" content="800"> <meta property="og:image:height" content="450"> <meta property="og:image" 
content="https://upload.wikimedia.org/wikipedia/commons/thumb/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/640px--Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.jpg"> <meta property="og:image:width" content="640"> <meta property="og:image:height" content="360"> <meta name="viewport" content="width=1120"> <meta property="og:title" content="Deepfake - Wikipedia"> <meta property="og:type" content="website"> <link rel="preconnect" href="//upload.wikimedia.org"> <link rel="alternate" media="only screen and (max-width: 640px)" href="//en.m.wikipedia.org/wiki/Deepfake"> <link rel="alternate" type="application/x-wiki" title="Edit this page" href="/w/index.php?title=Deepfake&amp;action=edit"> <link rel="apple-touch-icon" href="/static/apple-touch/wikipedia.png"> <link rel="icon" href="/static/favicon/wikipedia.ico"> <link rel="search" type="application/opensearchdescription+xml" href="/w/rest.php/v1/search" title="Wikipedia (en)"> <link rel="EditURI" type="application/rsd+xml" href="//en.wikipedia.org/w/api.php?action=rsd"> <link rel="canonical" href="https://en.wikipedia.org/wiki/Deepfake"> <link rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/deed.en"> <link rel="alternate" type="application/atom+xml" title="Wikipedia Atom feed" href="/w/index.php?title=Special:RecentChanges&amp;feed=atom"> <link rel="dns-prefetch" href="//meta.wikimedia.org" /> <link rel="dns-prefetch" href="//login.wikimedia.org"> </head> <body class="skin--responsive skin-vector skin-vector-search-vue mediawiki ltr sitedir-ltr mw-hide-empty-elt ns-0 ns-subject mw-editable page-Deepfake rootpage-Deepfake skin-vector-2022 action-view"><a class="mw-jump-link" href="#bodyContent">Jump to content</a> <div class="vector-header-container"> <header class="vector-header mw-header"> <div class="vector-header-start"> <nav class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-dropdown" class="vector-dropdown vector-main-menu-dropdown vector-button-flush-left 
vector-button-flush-right" > <input type="checkbox" id="vector-main-menu-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-main-menu-dropdown" class="vector-dropdown-checkbox " aria-label="Main menu" > <label id="vector-main-menu-dropdown-label" for="vector-main-menu-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-menu mw-ui-icon-wikimedia-menu"></span> <span class="vector-dropdown-label-text">Main menu</span> </label> <div class="vector-dropdown-content"> <div id="vector-main-menu-unpinned-container" class="vector-unpinned-container"> <div id="vector-main-menu" class="vector-main-menu vector-pinnable-element"> <div class="vector-pinnable-header vector-main-menu-pinnable-header vector-pinnable-header-unpinned" data-feature-name="main-menu-pinned" data-pinnable-element-id="vector-main-menu" data-pinned-container-id="vector-main-menu-pinned-container" data-unpinned-container-id="vector-main-menu-unpinned-container" > <div class="vector-pinnable-header-label">Main menu</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-main-menu.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-main-menu.unpin">hide</button> </div> <div id="p-navigation" class="vector-menu mw-portlet mw-portlet-navigation" > <div class="vector-menu-heading"> Navigation </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-mainpage-description" class="mw-list-item"><a href="/wiki/Main_Page" title="Visit the main page [z]" accesskey="z"><span>Main page</span></a></li><li id="n-contents" class="mw-list-item"><a href="/wiki/Wikipedia:Contents" title="Guides to browsing 
Wikipedia"><span>Contents</span></a></li><li id="n-currentevents" class="mw-list-item"><a href="/wiki/Portal:Current_events" title="Articles related to current events"><span>Current events</span></a></li><li id="n-randompage" class="mw-list-item"><a href="/wiki/Special:Random" title="Visit a randomly selected article [x]" accesskey="x"><span>Random article</span></a></li><li id="n-aboutsite" class="mw-list-item"><a href="/wiki/Wikipedia:About" title="Learn about Wikipedia and how it works"><span>About Wikipedia</span></a></li><li id="n-contactpage" class="mw-list-item"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us" title="How to contact Wikipedia"><span>Contact us</span></a></li> </ul> </div> </div> <div id="p-interaction" class="vector-menu mw-portlet mw-portlet-interaction" > <div class="vector-menu-heading"> Contribute </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-help" class="mw-list-item"><a href="/wiki/Help:Contents" title="Guidance on how to use and edit Wikipedia"><span>Help</span></a></li><li id="n-introduction" class="mw-list-item"><a href="/wiki/Help:Introduction" title="Learn how to edit Wikipedia"><span>Learn to edit</span></a></li><li id="n-portal" class="mw-list-item"><a href="/wiki/Wikipedia:Community_portal" title="The hub for editors"><span>Community portal</span></a></li><li id="n-recentchanges" class="mw-list-item"><a href="/wiki/Special:RecentChanges" title="A list of recent changes to Wikipedia [r]" accesskey="r"><span>Recent changes</span></a></li><li id="n-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_upload_wizard" title="Add images or other media for use on Wikipedia"><span>Upload file</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> <a href="/wiki/Main_Page" class="mw-logo"> <img class="mw-logo-icon" src="/static/images/icons/wikipedia.png" alt="" aria-hidden="true" height="50" width="50"> <span class="mw-logo-container skin-invert"> <img 
class="mw-logo-wordmark" alt="Wikipedia" src="/static/images/mobile/copyright/wikipedia-wordmark-en.svg" style="width: 7.5em; height: 1.125em;"> <img class="mw-logo-tagline" alt="The Free Encyclopedia" src="/static/images/mobile/copyright/wikipedia-tagline-en.svg" width="117" height="13" style="width: 7.3125em; height: 0.8125em;"> </span> </a> </div> <div class="vector-header-end"> <div id="p-search" role="search" class="vector-search-box-vue vector-search-box-collapses vector-search-box-show-thumbnail vector-search-box-auto-expand-width vector-search-box"> <a href="/wiki/Special:Search" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only search-toggle" title="Search Wikipedia [f]" accesskey="f"><span class="vector-icon mw-ui-icon-search mw-ui-icon-wikimedia-search"></span> <span>Search</span> </a> <div class="vector-typeahead-search-container"> <div class="cdx-typeahead-search cdx-typeahead-search--show-thumbnail cdx-typeahead-search--auto-expand-width"> <form action="/w/index.php" id="searchform" class="cdx-search-input cdx-search-input--has-end-button"> <div id="simpleSearch" class="cdx-search-input__input-wrapper" data-search-loc="header-moved"> <div class="cdx-text-input cdx-text-input--has-start-icon"> <input class="cdx-text-input__input" type="search" name="search" placeholder="Search Wikipedia" aria-label="Search Wikipedia" autocapitalize="sentences" title="Search Wikipedia [f]" accesskey="f" id="searchInput" > <span class="cdx-text-input__icon cdx-text-input__start-icon"></span> </div> <input type="hidden" name="title" value="Special:Search"> </div> <button class="cdx-button cdx-search-input__end-button">Search</button> </form> </div> </div> </div> <nav class="vector-user-links vector-user-links-wide" aria-label="Personal tools"> <div class="vector-user-links-main"> <div id="p-vector-user-menu-preferences" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> 
<ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-userpage" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-dropdown" class="vector-dropdown " title="Change the appearance of the page&#039;s font size, width, and color" > <input type="checkbox" id="vector-appearance-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-appearance-dropdown" class="vector-dropdown-checkbox " aria-label="Appearance" > <label id="vector-appearance-dropdown-label" for="vector-appearance-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-appearance mw-ui-icon-wikimedia-appearance"></span> <span class="vector-dropdown-label-text">Appearance</span> </label> <div class="vector-dropdown-content"> <div id="vector-appearance-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <div id="p-vector-user-menu-notifications" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-overflow" class="vector-menu mw-portlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="https://donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&amp;utm_medium=sidebar&amp;utm_campaign=C13_en.wikipedia.org&amp;uselang=en" class=""><span>Donate</span></a> </li> <li id="pt-createaccount-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" 
href="/w/index.php?title=Special:CreateAccount&amp;returnto=Deepfake" title="You are encouraged to create an account and log in; however, it is not mandatory" class=""><span>Create account</span></a> </li> <li id="pt-login-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:UserLogin&amp;returnto=Deepfake" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. [o]" accesskey="o" class=""><span>Log in</span></a> </li> </ul> </div> </div> </div> <div id="vector-user-links-dropdown" class="vector-dropdown vector-user-menu vector-button-flush-right vector-user-menu-logged-out" title="Log in and more options" > <input type="checkbox" id="vector-user-links-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-user-links-dropdown" class="vector-dropdown-checkbox " aria-label="Personal tools" > <label id="vector-user-links-dropdown-label" for="vector-user-links-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-ellipsis mw-ui-icon-wikimedia-ellipsis"></span> <span class="vector-dropdown-label-text">Personal tools</span> </label> <div class="vector-dropdown-content"> <div id="p-personal" class="vector-menu mw-portlet mw-portlet-personal user-links-collapsible-item" title="User menu" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport" class="user-links-collapsible-item mw-list-item"><a href="https://donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&amp;utm_medium=sidebar&amp;utm_campaign=C13_en.wikipedia.org&amp;uselang=en"><span>Donate</span></a></li><li id="pt-createaccount" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:CreateAccount&amp;returnto=Deepfake" 
title="You are encouraged to create an account and log in; however, it is not mandatory"><span class="vector-icon mw-ui-icon-userAdd mw-ui-icon-wikimedia-userAdd"></span> <span>Create account</span></a></li><li id="pt-login" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:UserLogin&amp;returnto=Deepfake" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. [o]" accesskey="o"><span class="vector-icon mw-ui-icon-logIn mw-ui-icon-wikimedia-logIn"></span> <span>Log in</span></a></li> </ul> </div> </div> <div id="p-user-menu-anon-editor" class="vector-menu mw-portlet mw-portlet-user-menu-anon-editor" > <div class="vector-menu-heading"> Pages for logged out editors <a href="/wiki/Help:Introduction" aria-label="Learn more about editing"><span>learn more</span></a> </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-anoncontribs" class="mw-list-item"><a href="/wiki/Special:MyContributions" title="A list of edits made from this IP address [y]" accesskey="y"><span>Contributions</span></a></li><li id="pt-anontalk" class="mw-list-item"><a href="/wiki/Special:MyTalk" title="Discussion about edits from this IP address [n]" accesskey="n"><span>Talk</span></a></li> </ul> </div> </div> </div> </div> </nav> </div> </header> </div> <div class="mw-page-container"> <div class="mw-page-container-inner"> <div class="vector-sitenotice-container"> <div id="siteNotice"><!-- CentralNotice --></div> </div> <div class="vector-column-start"> <div class="vector-main-menu-container"> <div id="mw-navigation"> <nav id="mw-panel" class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-pinned-container" class="vector-pinned-container"> </div> </nav> </div> </div> <div class="vector-sticky-pinned-container"> <nav id="mw-panel-toc" aria-label="Contents" data-event-name="ui.sidebar-toc" class="mw-table-of-contents-container vector-toc-landmark"> <div id="vector-toc-pinned-container" 
class="vector-pinned-container"> <div id="vector-toc" class="vector-toc vector-pinnable-element"> <div class="vector-pinnable-header vector-toc-pinnable-header vector-pinnable-header-pinned" data-feature-name="toc-pinned" data-pinnable-element-id="vector-toc" > <h2 class="vector-pinnable-header-label">Contents</h2> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-toc.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-toc.unpin">hide</button> </div> <ul class="vector-toc-contents" id="mw-panel-toc-list"> <li id="toc-mw-content-text" class="vector-toc-list-item vector-toc-level-1"> <a href="#" class="vector-toc-link"> <div class="vector-toc-text">(Top)</div> </a> </li> <li id="toc-History" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#History"> <div class="vector-toc-text"> <span class="vector-toc-numb">1</span> <span>History</span> </div> </a> <button aria-controls="toc-History-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle History subsection</span> </button> <ul id="toc-History-sublist" class="vector-toc-list"> <li id="toc-Academic_research" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Academic_research"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1</span> <span>Academic research</span> </div> </a> <ul id="toc-Academic_research-sublist" class="vector-toc-list"> <li id="toc-Social_science_and_humanities_approaches_to_deepfakes" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Social_science_and_humanities_approaches_to_deepfakes"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.1</span> <span>Social science and humanities 
approaches to deepfakes</span> </div> </a> <ul id="toc-Social_science_and_humanities_approaches_to_deepfakes-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Computer_science_research_on_deepfakes" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Computer_science_research_on_deepfakes"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.2</span> <span>Computer science research on deepfakes</span> </div> </a> <ul id="toc-Computer_science_research_on_deepfakes-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Amateur_development" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Amateur_development"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.2</span> <span>Amateur development</span> </div> </a> <ul id="toc-Amateur_development-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Commercial_development" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Commercial_development"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.3</span> <span>Commercial development</span> </div> </a> <ul id="toc-Commercial_development-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Resurrection" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Resurrection"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.4</span> <span>Resurrection</span> </div> </a> <ul id="toc-Resurrection-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Techniques" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#Techniques"> <div class="vector-toc-text"> <span class="vector-toc-numb">2</span> <span>Techniques</span> </div> </a> <ul id="toc-Techniques-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Applications" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#Applications"> <div 
class="vector-toc-text"> <span class="vector-toc-numb">3</span> <span>Applications</span> </div> </a> <button aria-controls="toc-Applications-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Applications subsection</span> </button> <ul id="toc-Applications-sublist" class="vector-toc-list"> <li id="toc-Acting" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Acting"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.1</span> <span>Acting</span> </div> </a> <ul id="toc-Acting-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Art" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Art"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.2</span> <span>Art</span> </div> </a> <ul id="toc-Art-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Blackmail" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Blackmail"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.3</span> <span>Blackmail</span> </div> </a> <ul id="toc-Blackmail-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Entertainment" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Entertainment"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.4</span> <span>Entertainment</span> </div> </a> <ul id="toc-Entertainment-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Fraud_and_scams" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Fraud_and_scams"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.5</span> <span>Fraud and scams</span> </div> </a> <ul id="toc-Fraud_and_scams-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Identity_masking" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" 
href="#Identity_masking"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.6</span> <span>Identity masking</span> </div> </a> <ul id="toc-Identity_masking-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Memes" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Memes"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.7</span> <span>Memes</span> </div> </a> <ul id="toc-Memes-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Politics" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Politics"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.8</span> <span>Politics</span> </div> </a> <ul id="toc-Politics-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Pornography" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Pornography"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.9</span> <span>Pornography</span> </div> </a> <ul id="toc-Pornography-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Social_media" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Social_media"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.10</span> <span>Social media</span> </div> </a> <ul id="toc-Social_media-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Sockpuppets" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Sockpuppets"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.11</span> <span>Sockpuppets</span> </div> </a> <ul id="toc-Sockpuppets-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Concerns_and_countermeasures" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#Concerns_and_countermeasures"> <div class="vector-toc-text"> <span class="vector-toc-numb">4</span> <span>Concerns and countermeasures</span> </div> </a> <button 
aria-controls="toc-Concerns_and_countermeasures-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Concerns and countermeasures subsection</span> </button> <ul id="toc-Concerns_and_countermeasures-sublist" class="vector-toc-list"> <li id="toc-Credibility_of_information" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Credibility_of_information"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.1</span> <span>Credibility of information</span> </div> </a> <ul id="toc-Credibility_of_information-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Defamation" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Defamation"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.2</span> <span>Defamation</span> </div> </a> <ul id="toc-Defamation-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Detection" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Detection"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.3</span> <span>Detection</span> </div> </a> <ul id="toc-Detection-sublist" class="vector-toc-list"> <li id="toc-Audio" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Audio"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.3.1</span> <span>Audio</span> </div> </a> <ul id="toc-Audio-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Video" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Video"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.3.2</span> <span>Video</span> </div> </a> <ul id="toc-Video-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Prevention" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Prevention"> <div 
class="vector-toc-text"> <span class="vector-toc-numb">4.4</span> <span>Prevention</span> </div> </a> <ul id="toc-Prevention-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Controversies" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Controversies"> <div class="vector-toc-text"> <span class="vector-toc-numb">4.5</span> <span>Controversies</span> </div> </a> <ul id="toc-Controversies-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Example_events" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#Example_events"> <div class="vector-toc-text"> <span class="vector-toc-numb">5</span> <span>Example events</span> </div> </a> <ul id="toc-Example_events-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Responses" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#Responses"> <div class="vector-toc-text"> <span class="vector-toc-numb">6</span> <span>Responses</span> </div> </a> <button aria-controls="toc-Responses-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Responses subsection</span> </button> <ul id="toc-Responses-sublist" class="vector-toc-list"> <li id="toc-Social_media_platforms" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Social_media_platforms"> <div class="vector-toc-text"> <span class="vector-toc-numb">6.1</span> <span>Social media platforms</span> </div> </a> <ul id="toc-Social_media_platforms-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Legislation" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Legislation"> <div class="vector-toc-text"> <span class="vector-toc-numb">6.2</span> <span>Legislation</span> </div> </a> <ul id="toc-Legislation-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Response_from_DARPA" 
class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Response_from_DARPA"> <div class="vector-toc-text"> <span class="vector-toc-numb">6.3</span> <span>Response from DARPA</span> </div> </a> <ul id="toc-Response_from_DARPA-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-International_Panel_on_the_Information_Environment" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#International_Panel_on_the_Information_Environment"> <div class="vector-toc-text"> <span class="vector-toc-numb">6.4</span> <span>International Panel on the Information Environment</span> </div> </a> <ul id="toc-International_Panel_on_the_Information_Environment-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-In_popular_culture" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#In_popular_culture"> <div class="vector-toc-text"> <span class="vector-toc-numb">7</span> <span>In popular culture</span> </div> </a> <ul id="toc-In_popular_culture-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-See_also" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#See_also"> <div class="vector-toc-text"> <span class="vector-toc-numb">8</span> <span>See also</span> </div> </a> <ul id="toc-See_also-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-References" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#References"> <div class="vector-toc-text"> <span class="vector-toc-numb">9</span> <span>References</span> </div> </a> <ul id="toc-References-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Further_reading" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#Further_reading"> <div class="vector-toc-text"> <span class="vector-toc-numb">10</span> <span>Further reading</span> </div> </a> <ul id="toc-Further_reading-sublist" class="vector-toc-list"> </ul> </li> <li 
id="toc-External_links" class="vector-toc-list-item vector-toc-level-1"> <a class="vector-toc-link" href="#External_links"> <div class="vector-toc-text"> <span class="vector-toc-numb">11</span> <span>External links</span> </div> </a> <ul id="toc-External_links-sublist" class="vector-toc-list"> </ul> </li> </ul> </div> </div> </nav> </div> </div> <div class="mw-content-container"> <main id="content" class="mw-body"> <header class="mw-body-header vector-page-titlebar"> <nav aria-label="Contents" class="vector-toc-landmark"> <div id="vector-page-titlebar-toc" class="vector-dropdown vector-page-titlebar-toc vector-button-flush-left" > <input type="checkbox" id="vector-page-titlebar-toc-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-titlebar-toc" class="vector-dropdown-checkbox " aria-label="Toggle the table of contents" > <label id="vector-page-titlebar-toc-label" for="vector-page-titlebar-toc-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-listBullet mw-ui-icon-wikimedia-listBullet"></span> <span class="vector-dropdown-label-text">Toggle the table of contents</span> </label> <div class="vector-dropdown-content"> <div id="vector-page-titlebar-toc-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <h1 id="firstHeading" class="firstHeading mw-first-heading"><span class="mw-page-title-main">Deepfake</span></h1> <div id="p-lang-btn" class="vector-dropdown mw-portlet mw-portlet-lang" > <input type="checkbox" id="p-lang-btn-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-p-lang-btn" class="vector-dropdown-checkbox mw-interlanguage-selector" aria-label="Go to an article in another language. 
Available in 45 languages" > <label id="p-lang-btn-label" for="p-lang-btn-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--action-progressive mw-portlet-lang-heading-45" aria-hidden="true" ><span class="vector-icon mw-ui-icon-language-progressive mw-ui-icon-wikimedia-language-progressive"></span> <span class="vector-dropdown-label-text">45 languages</span> </label> <div class="vector-dropdown-content"> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li class="interlanguage-link interwiki-af mw-list-item"><a href="https://af.wikipedia.org/wiki/Diepfop" title="Diepfop – Afrikaans" lang="af" hreflang="af" data-title="Diepfop" data-language-autonym="Afrikaans" data-language-local-name="Afrikaans" class="interlanguage-link-target"><span>Afrikaans</span></a></li><li class="interlanguage-link interwiki-ar mw-list-item"><a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B2%D9%8A%D9%8A%D9%81_%D8%B9%D9%85%D9%8A%D9%82" title="تزييف عميق – Arabic" lang="ar" hreflang="ar" data-title="تزييف عميق" data-language-autonym="العربية" data-language-local-name="Arabic" class="interlanguage-link-target"><span>العربية</span></a></li><li class="interlanguage-link interwiki-az mw-list-item"><a href="https://az.wikipedia.org/wiki/Dipfeyk" title="Dipfeyk – Azerbaijani" lang="az" hreflang="az" data-title="Dipfeyk" data-language-autonym="Azərbaycanca" data-language-local-name="Azerbaijani" class="interlanguage-link-target"><span>Azərbaycanca</span></a></li><li class="interlanguage-link interwiki-bg mw-list-item"><a href="https://bg.wikipedia.org/wiki/%D0%94%D0%B8%D0%B9%D0%BF%D1%84%D0%B5%D0%B9%D0%BA" title="Дийпфейк – Bulgarian" lang="bg" hreflang="bg" data-title="Дийпфейк" data-language-autonym="Български" data-language-local-name="Bulgarian" class="interlanguage-link-target"><span>Български</span></a></li><li class="interlanguage-link interwiki-ca mw-list-item"><a 
href="https://ca.wikipedia.org/wiki/Deepfake" title="Deepfake – Catalan" lang="ca" hreflang="ca" data-title="Deepfake" data-language-autonym="Català" data-language-local-name="Catalan" class="interlanguage-link-target"><span>Català</span></a></li><li class="interlanguage-link interwiki-cs mw-list-item"><a href="https://cs.wikipedia.org/wiki/Deepfake" title="Deepfake – Czech" lang="cs" hreflang="cs" data-title="Deepfake" data-language-autonym="Čeština" data-language-local-name="Czech" class="interlanguage-link-target"><span>Čeština</span></a></li><li class="interlanguage-link interwiki-de mw-list-item"><a href="https://de.wikipedia.org/wiki/Deepfake" title="Deepfake – German" lang="de" hreflang="de" data-title="Deepfake" data-language-autonym="Deutsch" data-language-local-name="German" class="interlanguage-link-target"><span>Deutsch</span></a></li><li class="interlanguage-link interwiki-et mw-list-item"><a href="https://et.wikipedia.org/wiki/S%C3%BCvav%C3%B5ltsing" title="Süvavõltsing – Estonian" lang="et" hreflang="et" data-title="Süvavõltsing" data-language-autonym="Eesti" data-language-local-name="Estonian" class="interlanguage-link-target"><span>Eesti</span></a></li><li class="interlanguage-link interwiki-es mw-list-item"><a href="https://es.wikipedia.org/wiki/Deepfake" title="Deepfake – Spanish" lang="es" hreflang="es" data-title="Deepfake" data-language-autonym="Español" data-language-local-name="Spanish" class="interlanguage-link-target"><span>Español</span></a></li><li class="interlanguage-link interwiki-eo mw-list-item"><a href="https://eo.wikipedia.org/wiki/Profund-falsa%C4%B5o" title="Profund-falsaĵo – Esperanto" lang="eo" hreflang="eo" data-title="Profund-falsaĵo" data-language-autonym="Esperanto" data-language-local-name="Esperanto" class="interlanguage-link-target"><span>Esperanto</span></a></li><li class="interlanguage-link interwiki-eu mw-list-item"><a href="https://eu.wikipedia.org/wiki/Deepfake" title="Deepfake – Basque" lang="eu" hreflang="eu" 
data-title="Deepfake" data-language-autonym="Euskara" data-language-local-name="Basque" class="interlanguage-link-target"><span>Euskara</span></a></li><li class="interlanguage-link interwiki-fa mw-list-item"><a href="https://fa.wikipedia.org/wiki/%D8%AC%D8%B9%D9%84_%D8%B9%D9%85%DB%8C%D9%82" title="جعل عمیق – Persian" lang="fa" hreflang="fa" data-title="جعل عمیق" data-language-autonym="فارسی" data-language-local-name="Persian" class="interlanguage-link-target"><span>فارسی</span></a></li><li class="interlanguage-link interwiki-fr mw-list-item"><a href="https://fr.wikipedia.org/wiki/Deepfake" title="Deepfake – French" lang="fr" hreflang="fr" data-title="Deepfake" data-language-autonym="Français" data-language-local-name="French" class="interlanguage-link-target"><span>Français</span></a></li><li class="interlanguage-link interwiki-ga mw-list-item"><a href="https://ga.wikipedia.org/wiki/Domhainbhrionn%C3%BA" title="Domhainbhrionnú – Irish" lang="ga" hreflang="ga" data-title="Domhainbhrionnú" data-language-autonym="Gaeilge" data-language-local-name="Irish" class="interlanguage-link-target"><span>Gaeilge</span></a></li><li class="interlanguage-link interwiki-ko mw-list-item"><a href="https://ko.wikipedia.org/wiki/%EB%94%A5%ED%8E%98%EC%9D%B4%ED%81%AC" title="딥페이크 – Korean" lang="ko" hreflang="ko" data-title="딥페이크" data-language-autonym="한국어" data-language-local-name="Korean" class="interlanguage-link-target"><span>한국어</span></a></li><li class="interlanguage-link interwiki-hy mw-list-item"><a href="https://hy.wikipedia.org/wiki/Deepfake" title="Deepfake – Armenian" lang="hy" hreflang="hy" data-title="Deepfake" data-language-autonym="Հայերեն" data-language-local-name="Armenian" class="interlanguage-link-target"><span>Հայերեն</span></a></li><li class="interlanguage-link interwiki-hi mw-list-item"><a 
href="https://hi.wikipedia.org/wiki/%E0%A4%A1%E0%A5%80%E0%A4%AA%E0%A4%AB%E0%A4%BC%E0%A5%87%E0%A4%95_%E0%A4%9F%E0%A5%87%E0%A4%95%E0%A5%8D%E0%A4%A8%E0%A5%8B%E0%A4%B2%E0%A5%89%E0%A4%9C%E0%A5%80" title="डीपफ़ेक टेक्नोलॉजी – Hindi" lang="hi" hreflang="hi" data-title="डीपफ़ेक टेक्नोलॉजी" data-language-autonym="हिन्दी" data-language-local-name="Hindi" class="interlanguage-link-target"><span>हिन्दी</span></a></li><li class="interlanguage-link interwiki-io mw-list-item"><a href="https://io.wikipedia.org/wiki/Profund-falsajo" title="Profund-falsajo – Ido" lang="io" hreflang="io" data-title="Profund-falsajo" data-language-autonym="Ido" data-language-local-name="Ido" class="interlanguage-link-target"><span>Ido</span></a></li><li class="interlanguage-link interwiki-id mw-list-item"><a href="https://id.wikipedia.org/wiki/Pemalsuan_dalam" title="Pemalsuan dalam – Indonesian" lang="id" hreflang="id" data-title="Pemalsuan dalam" data-language-autonym="Bahasa Indonesia" data-language-local-name="Indonesian" class="interlanguage-link-target"><span>Bahasa Indonesia</span></a></li><li class="interlanguage-link interwiki-zu mw-list-item"><a href="https://zu.wikipedia.org/wiki/Umbukisonyali" title="Umbukisonyali – Zulu" lang="zu" hreflang="zu" data-title="Umbukisonyali" data-language-autonym="IsiZulu" data-language-local-name="Zulu" class="interlanguage-link-target"><span>IsiZulu</span></a></li><li class="interlanguage-link interwiki-it mw-list-item"><a href="https://it.wikipedia.org/wiki/Deepfake" title="Deepfake – Italian" lang="it" hreflang="it" data-title="Deepfake" data-language-autonym="Italiano" data-language-local-name="Italian" class="interlanguage-link-target"><span>Italiano</span></a></li><li class="interlanguage-link interwiki-he mw-list-item"><a href="https://he.wikipedia.org/wiki/%D7%93%D7%99%D7%A4-%D7%A4%D7%99%D7%99%D7%A7" title="דיפ-פייק – Hebrew" lang="he" hreflang="he" data-title="דיפ-פייק" data-language-autonym="עברית" data-language-local-name="Hebrew" 
class="interlanguage-link-target"><span>עברית</span></a></li><li class="interlanguage-link interwiki-lv mw-list-item"><a href="https://lv.wikipedia.org/wiki/Dzi%C4%BCviltojums" title="Dziļviltojums – Latvian" lang="lv" hreflang="lv" data-title="Dziļviltojums" data-language-autonym="Latviešu" data-language-local-name="Latvian" class="interlanguage-link-target"><span>Latviešu</span></a></li><li class="interlanguage-link interwiki-lt mw-list-item"><a href="https://lt.wikipedia.org/wiki/I%C5%A1manioji_vaizdo_klastot%C4%97" title="Išmanioji vaizdo klastotė – Lithuanian" lang="lt" hreflang="lt" data-title="Išmanioji vaizdo klastotė" data-language-autonym="Lietuvių" data-language-local-name="Lithuanian" class="interlanguage-link-target"><span>Lietuvių</span></a></li><li class="interlanguage-link interwiki-ms mw-list-item"><a href="https://ms.wikipedia.org/wiki/Pemalsuan_wajah" title="Pemalsuan wajah – Malay" lang="ms" hreflang="ms" data-title="Pemalsuan wajah" data-language-autonym="Bahasa Melayu" data-language-local-name="Malay" class="interlanguage-link-target"><span>Bahasa Melayu</span></a></li><li class="interlanguage-link interwiki-nl mw-list-item"><a href="https://nl.wikipedia.org/wiki/Deepfake" title="Deepfake – Dutch" lang="nl" hreflang="nl" data-title="Deepfake" data-language-autonym="Nederlands" data-language-local-name="Dutch" class="interlanguage-link-target"><span>Nederlands</span></a></li><li class="interlanguage-link interwiki-ja mw-list-item"><a href="https://ja.wikipedia.org/wiki/%E3%83%87%E3%82%A3%E3%83%BC%E3%83%97%E3%83%95%E3%82%A7%E3%82%A4%E3%82%AF" title="ディープフェイク – Japanese" lang="ja" hreflang="ja" data-title="ディープフェイク" data-language-autonym="日本語" data-language-local-name="Japanese" class="interlanguage-link-target"><span>日本語</span></a></li><li class="interlanguage-link interwiki-pa mw-list-item"><a href="https://pa.wikipedia.org/wiki/%E0%A8%A1%E0%A9%80%E0%A8%AA%E0%A8%AB%E0%A9%87%E0%A8%95" title="ਡੀਪਫੇਕ – Punjabi" lang="pa" hreflang="pa" 
data-title="ਡੀਪਫੇਕ" data-language-autonym="ਪੰਜਾਬੀ" data-language-local-name="Punjabi" class="interlanguage-link-target"><span>ਪੰਜਾਬੀ</span></a></li><li class="interlanguage-link interwiki-pnb mw-list-item"><a href="https://pnb.wikipedia.org/wiki/%DA%88%DB%8C%D9%BE_%D9%81%DB%8C%DA%A9_%D9%B9%DB%8C%DA%A9%D9%86%D8%A7%D9%84%D9%88%D8%AC%DB%8C" title="ڈیپ فیک ٹیکنالوجی – Western Punjabi" lang="pnb" hreflang="pnb" data-title="ڈیپ فیک ٹیکنالوجی" data-language-autonym="پنجابی" data-language-local-name="Western Punjabi" class="interlanguage-link-target"><span>پنجابی</span></a></li><li class="interlanguage-link interwiki-pl mw-list-item"><a href="https://pl.wikipedia.org/wiki/Deepfake" title="Deepfake – Polish" lang="pl" hreflang="pl" data-title="Deepfake" data-language-autonym="Polski" data-language-local-name="Polish" class="interlanguage-link-target"><span>Polski</span></a></li><li class="interlanguage-link interwiki-pt mw-list-item"><a href="https://pt.wikipedia.org/wiki/Deepfake" title="Deepfake – Portuguese" lang="pt" hreflang="pt" data-title="Deepfake" data-language-autonym="Português" data-language-local-name="Portuguese" class="interlanguage-link-target"><span>Português</span></a></li><li class="interlanguage-link interwiki-ru mw-list-item"><a href="https://ru.wikipedia.org/wiki/%D0%94%D0%B8%D0%BF%D1%84%D0%B5%D0%B9%D0%BA" title="Дипфейк – Russian" lang="ru" hreflang="ru" data-title="Дипфейк" data-language-autonym="Русский" data-language-local-name="Russian" class="interlanguage-link-target"><span>Русский</span></a></li><li class="interlanguage-link interwiki-sq mw-list-item"><a href="https://sq.wikipedia.org/wiki/Deepfake" title="Deepfake – Albanian" lang="sq" hreflang="sq" data-title="Deepfake" data-language-autonym="Shqip" data-language-local-name="Albanian" class="interlanguage-link-target"><span>Shqip</span></a></li><li class="interlanguage-link interwiki-simple mw-list-item"><a href="https://simple.wikipedia.org/wiki/Deepfake" title="Deepfake – Simple English" 
lang="en-simple" hreflang="en-simple" data-title="Deepfake" data-language-autonym="Simple English" data-language-local-name="Simple English" class="interlanguage-link-target"><span>Simple English</span></a></li><li class="interlanguage-link interwiki-sk mw-list-item"><a href="https://sk.wikipedia.org/wiki/Deepfake" title="Deepfake – Slovak" lang="sk" hreflang="sk" data-title="Deepfake" data-language-autonym="Slovenčina" data-language-local-name="Slovak" class="interlanguage-link-target"><span>Slovenčina</span></a></li><li class="interlanguage-link interwiki-sr mw-list-item"><a href="https://sr.wikipedia.org/wiki/%D0%94%D0%B8%D0%BF%D1%84%D0%B5%D1%98%D0%BA" title="Дипфејк – Serbian" lang="sr" hreflang="sr" data-title="Дипфејк" data-language-autonym="Српски / srpski" data-language-local-name="Serbian" class="interlanguage-link-target"><span>Српски / srpski</span></a></li><li class="interlanguage-link interwiki-fi mw-list-item"><a href="https://fi.wikipedia.org/wiki/Deepfake" title="Deepfake – Finnish" lang="fi" hreflang="fi" data-title="Deepfake" data-language-autonym="Suomi" data-language-local-name="Finnish" class="interlanguage-link-target"><span>Suomi</span></a></li><li class="interlanguage-link interwiki-sv mw-list-item"><a href="https://sv.wikipedia.org/wiki/Deepfake" title="Deepfake – Swedish" lang="sv" hreflang="sv" data-title="Deepfake" data-language-autonym="Svenska" data-language-local-name="Swedish" class="interlanguage-link-target"><span>Svenska</span></a></li><li class="interlanguage-link interwiki-te mw-list-item"><a href="https://te.wikipedia.org/wiki/%E0%B0%A1%E0%B1%80%E0%B0%AA%E0%B1%8D_%E0%B0%AB%E0%B1%87%E0%B0%95%E0%B1%8D" title="డీప్ ఫేక్ – Telugu" lang="te" hreflang="te" data-title="డీప్ ఫేక్" data-language-autonym="తెలుగు" data-language-local-name="Telugu" class="interlanguage-link-target"><span>తెలుగు</span></a></li><li class="interlanguage-link interwiki-th mw-list-item"><a 
href="https://th.wikipedia.org/wiki/%E0%B8%AA%E0%B8%B7%E0%B9%88%E0%B8%AD%E0%B8%A5%E0%B8%A7%E0%B8%87%E0%B8%A5%E0%B8%B6%E0%B8%81" title="สื่อลวงลึก – Thai" lang="th" hreflang="th" data-title="สื่อลวงลึก" data-language-autonym="ไทย" data-language-local-name="Thai" class="interlanguage-link-target"><span>ไทย</span></a></li><li class="interlanguage-link interwiki-tr mw-list-item"><a href="https://tr.wikipedia.org/wiki/Deepfake" title="Deepfake – Turkish" lang="tr" hreflang="tr" data-title="Deepfake" data-language-autonym="Türkçe" data-language-local-name="Turkish" class="interlanguage-link-target"><span>Türkçe</span></a></li><li class="interlanguage-link interwiki-uk mw-list-item"><a href="https://uk.wikipedia.org/wiki/%D0%94%D0%B8%D0%BF%D1%84%D0%B5%D0%B9%D0%BA" title="Дипфейк – Ukrainian" lang="uk" hreflang="uk" data-title="Дипфейк" data-language-autonym="Українська" data-language-local-name="Ukrainian" class="interlanguage-link-target"><span>Українська</span></a></li><li class="interlanguage-link interwiki-ur mw-list-item"><a href="https://ur.wikipedia.org/wiki/%DA%88%DB%8C%D9%BE_%D9%81%DB%8C%DA%A9_%D9%B9%DB%8C%DA%A9%D9%86%D8%A7%D9%84%D9%88%D8%AC%DB%8C" title="ڈیپ فیک ٹیکنالوجی – Urdu" lang="ur" hreflang="ur" data-title="ڈیپ فیک ٹیکنالوجی" data-language-autonym="اردو" data-language-local-name="Urdu" class="interlanguage-link-target"><span>اردو</span></a></li><li class="interlanguage-link interwiki-vi mw-list-item"><a href="https://vi.wikipedia.org/wiki/Deepfake" title="Deepfake – Vietnamese" lang="vi" hreflang="vi" data-title="Deepfake" data-language-autonym="Tiếng Việt" data-language-local-name="Vietnamese" class="interlanguage-link-target"><span>Tiếng Việt</span></a></li><li class="interlanguage-link interwiki-zh mw-list-item"><a href="https://zh.wikipedia.org/wiki/%E6%B7%B1%E4%BC%AA%E6%8A%80%E6%9C%AF" title="深伪技术 – Chinese" lang="zh" hreflang="zh" data-title="深伪技术" data-language-autonym="中文" data-language-local-name="Chinese" 
class="interlanguage-link-target"><span>中文</span></a></li> </ul> <div class="after-portlet after-portlet-lang"><span class="wb-langlinks-edit wb-langlinks-link"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q49473179#sitelinks-wikipedia" title="Edit interlanguage links" class="wbc-editpage">Edit links</a></span></div> </div> </div> </div> </header> <div class="vector-page-toolbar"> <div class="vector-page-toolbar-container"> <div id="left-navigation"> <nav aria-label="Namespaces"> <div id="p-associated-pages" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-associated-pages" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-nstab-main" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/Deepfake" title="View the content page [c]" accesskey="c"><span>Article</span></a></li><li id="ca-talk" class="vector-tab-noicon mw-list-item"><a href="/wiki/Talk:Deepfake" rel="discussion" title="Discuss improvements to the content page [t]" accesskey="t"><span>Talk</span></a></li> </ul> </div> </div> <div id="vector-variants-dropdown" class="vector-dropdown emptyPortlet" > <input type="checkbox" id="vector-variants-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-variants-dropdown" class="vector-dropdown-checkbox " aria-label="Change language variant" > <label id="vector-variants-dropdown-label" for="vector-variants-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">English</span> </label> <div class="vector-dropdown-content"> <div id="p-variants" class="vector-menu mw-portlet mw-portlet-variants emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> </div> </div> </nav> </div> <div id="right-navigation" class="vector-collapsible"> <nav aria-label="Views"> <div id="p-views" 
class="vector-menu vector-menu-tabs mw-portlet mw-portlet-views" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-view" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/Deepfake"><span>Read</span></a></li><li id="ca-edit" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=Deepfake&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-history" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=Deepfake&amp;action=history" title="Past revisions of this page [h]" accesskey="h"><span>View history</span></a></li> </ul> </div> </div> </nav> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-dropdown" class="vector-dropdown vector-page-tools-dropdown" > <input type="checkbox" id="vector-page-tools-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-tools-dropdown" class="vector-dropdown-checkbox " aria-label="Tools" > <label id="vector-page-tools-dropdown-label" for="vector-page-tools-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">Tools</span> </label> <div class="vector-dropdown-content"> <div id="vector-page-tools-unpinned-container" class="vector-unpinned-container"> <div id="vector-page-tools" class="vector-page-tools vector-pinnable-element"> <div class="vector-pinnable-header vector-page-tools-pinnable-header vector-pinnable-header-unpinned" data-feature-name="page-tools-pinned" data-pinnable-element-id="vector-page-tools" data-pinned-container-id="vector-page-tools-pinned-container" data-unpinned-container-id="vector-page-tools-unpinned-container" > <div class="vector-pinnable-header-label">Tools</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" 
data-event-name="pinnable-header.vector-page-tools.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-page-tools.unpin">hide</button> </div> <div id="p-cactions" class="vector-menu mw-portlet mw-portlet-cactions emptyPortlet vector-has-collapsible-items" title="More options" > <div class="vector-menu-heading"> Actions </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-more-view" class="selected vector-more-collapsible-item mw-list-item"><a href="/wiki/Deepfake"><span>Read</span></a></li><li id="ca-more-edit" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=Deepfake&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-more-history" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=Deepfake&amp;action=history"><span>View history</span></a></li> </ul> </div> </div> <div id="p-tb" class="vector-menu mw-portlet mw-portlet-tb" > <div class="vector-menu-heading"> General </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-whatlinkshere" class="mw-list-item"><a href="/wiki/Special:WhatLinksHere/Deepfake" title="List of all English Wikipedia pages containing links to this page [j]" accesskey="j"><span>What links here</span></a></li><li id="t-recentchangeslinked" class="mw-list-item"><a href="/wiki/Special:RecentChangesLinked/Deepfake" rel="nofollow" title="Recent changes in pages linked from this page [k]" accesskey="k"><span>Related changes</span></a></li><li id="t-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_Upload_Wizard" title="Upload files [u]" accesskey="u"><span>Upload file</span></a></li><li id="t-specialpages" class="mw-list-item"><a href="/wiki/Special:SpecialPages" title="A list of all special pages [q]" accesskey="q"><span>Special pages</span></a></li><li 
id="t-permalink" class="mw-list-item"><a href="/w/index.php?title=Deepfake&amp;oldid=1258994312" title="Permanent link to this revision of this page"><span>Permanent link</span></a></li><li id="t-info" class="mw-list-item"><a href="/w/index.php?title=Deepfake&amp;action=info" title="More information about this page"><span>Page information</span></a></li><li id="t-cite" class="mw-list-item"><a href="/w/index.php?title=Special:CiteThisPage&amp;page=Deepfake&amp;id=1258994312&amp;wpFormIdentifier=titleform" title="Information on how to cite this page"><span>Cite this page</span></a></li><li id="t-urlshortener" class="mw-list-item"><a href="/w/index.php?title=Special:UrlShortener&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FDeepfake"><span>Get shortened URL</span></a></li><li id="t-urlshortener-qrcode" class="mw-list-item"><a href="/w/index.php?title=Special:QrCode&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FDeepfake"><span>Download QR code</span></a></li> </ul> </div> </div> <div id="p-coll-print_export" class="vector-menu mw-portlet mw-portlet-coll-print_export" > <div class="vector-menu-heading"> Print/export </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="coll-download-as-rl" class="mw-list-item"><a href="/w/index.php?title=Special:DownloadAsPdf&amp;page=Deepfake&amp;action=show-download-screen" title="Download this page as a PDF file"><span>Download as PDF</span></a></li><li id="t-print" class="mw-list-item"><a href="/w/index.php?title=Deepfake&amp;printable=yes" title="Printable version of this page [p]" accesskey="p"><span>Printable version</span></a></li> </ul> </div> </div> <div id="p-wikibase-otherprojects" class="vector-menu mw-portlet mw-portlet-wikibase-otherprojects" > <div class="vector-menu-heading"> In other projects </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li class="wb-otherproject-link wb-otherproject-commons mw-list-item"><a 
href="https://commons.wikimedia.org/wiki/Category:Deepfake" hreflang="en"><span>Wikimedia Commons</span></a></li><li id="t-wikibase" class="wb-otherproject-link wb-otherproject-wikibase-dataitem mw-list-item"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q49473179" title="Structured data on this page hosted by Wikidata [g]" accesskey="g"><span>Wikidata item</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> </div> </div> </div> <div class="vector-column-end"> <div class="vector-sticky-pinned-container"> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-pinned-container" class="vector-pinned-container"> </div> </nav> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-pinned-container" class="vector-pinned-container"> <div id="vector-appearance" class="vector-appearance vector-pinnable-element"> <div class="vector-pinnable-header vector-appearance-pinnable-header vector-pinnable-header-pinned" data-feature-name="appearance-pinned" data-pinnable-element-id="vector-appearance" data-pinned-container-id="vector-appearance-pinned-container" data-unpinned-container-id="vector-appearance-unpinned-container" > <div class="vector-pinnable-header-label">Appearance</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-appearance.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-appearance.unpin">hide</button> </div> </div> </div> </nav> </div> </div> <div id="bodyContent" class="vector-body" aria-labelledby="firstHeading" data-mw-ve-target-container> <div class="vector-body-before-content"> <div class="mw-indicators"> </div> <div id="siteSub" class="noprint">From Wikipedia, the free encyclopedia</div> </div> <div id="contentSub"><div id="mw-content-subtitle"></div></div> <div 
id="mw-content-text" class="mw-body-content"><div class="mw-content-ltr mw-parser-output" lang="en" dir="ltr"><div class="shortdescription nomobile noexcerpt noprint searchaux" style="display:none">Realistic artificially generated media</div> <style data-mw-deduplicate="TemplateStyles:r1251242444">.mw-parser-output .ambox{border:1px solid #a2a9b1;border-left:10px solid #36c;background-color:#fbfbfb;box-sizing:border-box}.mw-parser-output .ambox+link+.ambox,.mw-parser-output .ambox+link+style+.ambox,.mw-parser-output .ambox+link+link+.ambox,.mw-parser-output .ambox+.mw-empty-elt+link+.ambox,.mw-parser-output .ambox+.mw-empty-elt+link+style+.ambox,.mw-parser-output .ambox+.mw-empty-elt+link+link+.ambox{margin-top:-1px}html body.mediawiki .mw-parser-output .ambox.mbox-small-left{margin:4px 1em 4px 0;overflow:hidden;width:238px;border-collapse:collapse;font-size:88%;line-height:1.25em}.mw-parser-output .ambox-speedy{border-left:10px solid #b32424;background-color:#fee7e6}.mw-parser-output .ambox-delete{border-left:10px solid #b32424}.mw-parser-output .ambox-content{border-left:10px solid #f28500}.mw-parser-output .ambox-style{border-left:10px solid #fc3}.mw-parser-output .ambox-move{border-left:10px solid #9932cc}.mw-parser-output .ambox-protection{border-left:10px solid #a2a9b1}.mw-parser-output .ambox .mbox-text{border:none;padding:0.25em 0.5em;width:100%}.mw-parser-output .ambox .mbox-image{border:none;padding:2px 0 2px 0.5em;text-align:center}.mw-parser-output .ambox .mbox-imageright{border:none;padding:2px 0.5em 2px 0;text-align:center}.mw-parser-output .ambox .mbox-empty-cell{border:none;padding:0;width:1px}.mw-parser-output .ambox .mbox-image-div{width:52px}@media(min-width:720px){.mw-parser-output .ambox{margin:0 10%}}@media print{body.ns-0 .mw-parser-output .ambox{display:none!important}}</style><table class="box-Very_long plainlinks metadata ambox ambox-style ambox-very_long" role="presentation"><tbody><tr><td class="mbox-image"><div 
class="mbox-image-div"><span typeof="mw:File"><span><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/f/f2/Edit-clear.svg/40px-Edit-clear.svg.png" decoding="async" width="40" height="40" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/f/f2/Edit-clear.svg/60px-Edit-clear.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/f/f2/Edit-clear.svg/80px-Edit-clear.svg.png 2x" data-file-width="48" data-file-height="48" /></span></span></div></td><td class="mbox-text"><div class="mbox-text-span">This article <b>may be <a href="/wiki/Wikipedia:Article_size" title="Wikipedia:Article size">too long</a> to read and navigate comfortably</b>.<span class="hide-when-compact"> Consider <a href="/wiki/Wikipedia:Splitting" title="Wikipedia:Splitting">splitting</a> content into sub-articles, <a href="/wiki/Wikipedia:Summary_style" title="Wikipedia:Summary style">condensing</a> it, or adding <a href="/wiki/Help:Section#Subsections" title="Help:Section">subheadings</a>. Please discuss this issue on the article's <a href="/wiki/Talk:Deepfake" title="Talk:Deepfake">talk page</a>.</span> <span class="date-container"><i>(<span class="date">November 2024</span>)</i></span></div></td></tr></tbody></table> <figure class="mw-default-size" typeof="mw:File/Thumb"><span><video id="mwe_player_0" poster="//upload.wikimedia.org/wikipedia/commons/thumb/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/260px--Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.jpg" controls="" preload="none" data-mw-tmh="" class="mw-file-element" width="260" height="146" data-durationhint="50" data-mwtitle="Dictators_-_Kim_Jong-Un_by_RepresentUs.webm" data-mwprovider="wikimediacommons" resource="/wiki/File:Dictators_-_Kim_Jong-Un_by_RepresentUs.webm"><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.480p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" 
data-transcodekey="480p.vp9.webm" data-width="854" data-height="480" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.720p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="720p.vp9.webm" data-width="1280" data-height="720" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.1080p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="1080p.vp9.webm" data-width="1920" data-height="1080" /><source src="//upload.wikimedia.org/wikipedia/commons/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-width="1920" data-height="1080" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.144p.mjpeg.mov" type="video/quicktime" data-transcodekey="144p.mjpeg.mov" data-width="256" data-height="144" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.240p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="240p.vp9.webm" data-width="426" data-height="240" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.360p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="360p.vp9.webm" data-width="640" data-height="360" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/9/9c/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm.360p.webm" type="video/webm; codecs=&quot;vp8, vorbis&quot;" data-transcodekey="360p.webm" data-width="640" data-height="360" /><track 
src="https://commons.wikimedia.org/w/api.php?action=timedtext&amp;title=File%3ADictators_-_Kim_Jong-Un_by_RepresentUs.webm&amp;lang=de-formal&amp;trackformat=vtt&amp;origin=%2A" kind="subtitles" type="text/vtt" srclang="de-x-formal" label="Deutsch (Sie-Form) ‪(de-formal)‬" data-dir="ltr" /><track src="https://commons.wikimedia.org/w/api.php?action=timedtext&amp;title=File%3ADictators_-_Kim_Jong-Un_by_RepresentUs.webm&amp;lang=en&amp;trackformat=vtt&amp;origin=%2A" kind="subtitles" type="text/vtt" srclang="en" label="English ‪(en)‬" data-dir="ltr" /></video></span><figcaption>A video deepfake of <a href="/wiki/Kim_Jong_Un" title="Kim Jong Un">Kim Jong Un</a> created in 2020 by a nonpartisan advocacy group <a href="/wiki/RepresentUs" title="RepresentUs">RepresentUs</a></figcaption></figure> <style data-mw-deduplicate="TemplateStyles:r1129693374">.mw-parser-output .hlist dl,.mw-parser-output .hlist ol,.mw-parser-output .hlist ul{margin:0;padding:0}.mw-parser-output .hlist dd,.mw-parser-output .hlist dt,.mw-parser-output .hlist li{margin:0;display:inline}.mw-parser-output .hlist.inline,.mw-parser-output .hlist.inline dl,.mw-parser-output .hlist.inline ol,.mw-parser-output .hlist.inline ul,.mw-parser-output .hlist dl dl,.mw-parser-output .hlist dl ol,.mw-parser-output .hlist dl ul,.mw-parser-output .hlist ol dl,.mw-parser-output .hlist ol ol,.mw-parser-output .hlist ol ul,.mw-parser-output .hlist ul dl,.mw-parser-output .hlist ul ol,.mw-parser-output .hlist ul ul{display:inline}.mw-parser-output .hlist .mw-empty-li{display:none}.mw-parser-output .hlist dt::after{content:": "}.mw-parser-output .hlist dd::after,.mw-parser-output .hlist li::after{content:" · ";font-weight:bold}.mw-parser-output .hlist dd:last-child::after,.mw-parser-output .hlist dt:last-child::after,.mw-parser-output .hlist li:last-child::after{content:none}.mw-parser-output .hlist dd dd:first-child::before,.mw-parser-output .hlist dd dt:first-child::before,.mw-parser-output .hlist dd 
li:first-child::before,.mw-parser-output .hlist dt dd:first-child::before,.mw-parser-output .hlist dt dt:first-child::before,.mw-parser-output .hlist dt li:first-child::before,.mw-parser-output .hlist li dd:first-child::before,.mw-parser-output .hlist li dt:first-child::before,.mw-parser-output .hlist li li:first-child::before{content:" (";font-weight:normal}.mw-parser-output .hlist dd dd:last-child::after,.mw-parser-output .hlist dd dt:last-child::after,.mw-parser-output .hlist dd li:last-child::after,.mw-parser-output .hlist dt dd:last-child::after,.mw-parser-output .hlist dt dt:last-child::after,.mw-parser-output .hlist dt li:last-child::after,.mw-parser-output .hlist li dd:last-child::after,.mw-parser-output .hlist li dt:last-child::after,.mw-parser-output .hlist li li:last-child::after{content:")";font-weight:normal}.mw-parser-output .hlist ol{counter-reset:listitem}.mw-parser-output .hlist ol>li{counter-increment:listitem}.mw-parser-output .hlist ol>li::before{content:" "counter(listitem)"\a0 "}.mw-parser-output .hlist dd ol>li:first-child::before,.mw-parser-output .hlist dt ol>li:first-child::before,.mw-parser-output .hlist li ol>li:first-child::before{content:" ("counter(listitem)"\a0 "}</style><style data-mw-deduplicate="TemplateStyles:r1246091330">.mw-parser-output .sidebar{width:22em;float:right;clear:right;margin:0.5em 0 1em 1em;background:var(--background-color-neutral-subtle,#f8f9fa);border:1px solid var(--border-color-base,#a2a9b1);padding:0.2em;text-align:center;line-height:1.4em;font-size:88%;border-collapse:collapse;display:table}body.skin-minerva .mw-parser-output .sidebar{display:table!important;float:right!important;margin:0.5em 0 1em 1em!important}.mw-parser-output .sidebar-subgroup{width:100%;margin:0;border-spacing:0}.mw-parser-output .sidebar-left{float:left;clear:left;margin:0.5em 1em 1em 0}.mw-parser-output .sidebar-none{float:none;clear:both;margin:0.5em 1em 1em 0}.mw-parser-output .sidebar-outer-title{padding:0 0.4em 
0.2em;font-size:125%;line-height:1.2em;font-weight:bold}.mw-parser-output .sidebar-top-image{padding:0.4em}.mw-parser-output .sidebar-top-caption,.mw-parser-output .sidebar-pretitle-with-top-image,.mw-parser-output .sidebar-caption{padding:0.2em 0.4em 0;line-height:1.2em}.mw-parser-output .sidebar-pretitle{padding:0.4em 0.4em 0;line-height:1.2em}.mw-parser-output .sidebar-title,.mw-parser-output .sidebar-title-with-pretitle{padding:0.2em 0.8em;font-size:145%;line-height:1.2em}.mw-parser-output .sidebar-title-with-pretitle{padding:0.1em 0.4em}.mw-parser-output .sidebar-image{padding:0.2em 0.4em 0.4em}.mw-parser-output .sidebar-heading{padding:0.1em 0.4em}.mw-parser-output .sidebar-content{padding:0 0.5em 0.4em}.mw-parser-output .sidebar-content-with-subgroup{padding:0.1em 0.4em 0.2em}.mw-parser-output .sidebar-above,.mw-parser-output .sidebar-below{padding:0.3em 0.8em;font-weight:bold}.mw-parser-output .sidebar-collapse .sidebar-above,.mw-parser-output .sidebar-collapse .sidebar-below{border-top:1px solid #aaa;border-bottom:1px solid #aaa}.mw-parser-output .sidebar-navbar{text-align:right;font-size:115%;padding:0 0.4em 0.4em}.mw-parser-output .sidebar-list-title{padding:0 0.4em;text-align:left;font-weight:bold;line-height:1.6em;font-size:105%}.mw-parser-output .sidebar-list-title-c{padding:0 0.4em;text-align:center;margin:0 3.3em}@media(max-width:640px){body.mediawiki .mw-parser-output .sidebar{width:100%!important;clear:both;float:none!important;margin-left:0!important;margin-right:0!important}}body.skin--responsive .mw-parser-output .sidebar a>img{max-width:none!important}@media screen{html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-list-title,html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle{background:transparent!important}html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle a{color:var(--color-progressive)!important}}@media 
screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-list-title,html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle{background:transparent!important}html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle a{color:var(--color-progressive)!important}}@media print{body.ns-0 .mw-parser-output .sidebar{display:none!important}}</style><table class="sidebar sidebar-collapse nomobile nowraplinks hlist"><tbody><tr><td class="sidebar-pretitle">Part of a series on</td></tr><tr><th class="sidebar-title-with-pretitle"><a href="/wiki/Artificial_intelligence" title="Artificial intelligence">Artificial intelligence</a></th></tr><tr><td class="sidebar-image"><figure class="mw-halign-center" typeof="mw:File"><a href="/wiki/File:Dall-e_3_(jan_%2724)_artificial_intelligence_icon.png" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/100px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png" decoding="async" width="100" height="100" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/150px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/200px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png 2x" data-file-width="820" data-file-height="820" /></a><figcaption></figcaption></figure></td></tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Artificial_intelligence#Goals" title="Artificial intelligence">Major goals</a></div><div class="sidebar-list-content mw-collapsible-content"> 
<ul><li><a href="/wiki/Artificial_general_intelligence" title="Artificial general intelligence">Artificial general intelligence</a></li> <li><a href="/wiki/Intelligent_agent" title="Intelligent agent">Intelligent agent</a></li> <li><a href="/wiki/Recursive_self-improvement" title="Recursive self-improvement">Recursive self-improvement</a></li> <li><a href="/wiki/Automated_planning_and_scheduling" title="Automated planning and scheduling">Planning</a></li> <li><a href="/wiki/Computer_vision" title="Computer vision">Computer vision</a></li> <li><a href="/wiki/General_game_playing" title="General game playing">General game playing</a></li> <li><a href="/wiki/Knowledge_representation_and_reasoning" title="Knowledge representation and reasoning">Knowledge reasoning</a></li> <li><a href="/wiki/Natural_language_processing" title="Natural language processing">Natural language processing</a></li> <li><a href="/wiki/Robotics" title="Robotics">Robotics</a></li> <li><a href="/wiki/AI_safety" title="AI safety">AI safety</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)">Approaches</div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Machine_learning" title="Machine learning">Machine learning</a></li> <li><a href="/wiki/Symbolic_artificial_intelligence" title="Symbolic artificial intelligence">Symbolic</a></li> <li><a href="/wiki/Deep_learning" title="Deep learning">Deep learning</a></li> <li><a href="/wiki/Bayesian_network" title="Bayesian network">Bayesian networks</a></li> <li><a href="/wiki/Evolutionary_algorithm" title="Evolutionary algorithm">Evolutionary algorithms</a></li> <li><a href="/wiki/Hybrid_intelligent_system" title="Hybrid intelligent system">Hybrid intelligent systems</a></li> <li><a href="/wiki/Artificial_intelligence_systems_integration" title="Artificial intelligence 
systems integration">Systems integration</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Applications_of_artificial_intelligence" title="Applications of artificial intelligence">Applications</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Machine_learning_in_bioinformatics" title="Machine learning in bioinformatics">Bioinformatics</a></li> <li><a class="mw-selflink selflink">Deepfake</a></li> <li><a href="/wiki/Machine_learning_in_earth_sciences" title="Machine learning in earth sciences">Earth sciences</a></li> <li><a href="/wiki/Applications_of_artificial_intelligence#Finance" title="Applications of artificial intelligence"> Finance </a></li> <li><a href="/wiki/Generative_artificial_intelligence" title="Generative artificial intelligence">Generative AI</a> <ul><li><a href="/wiki/Artificial_intelligence_art" title="Artificial intelligence art">Art</a></li> <li><a href="/wiki/Generative_audio" title="Generative audio">Audio</a></li> <li><a href="/wiki/Music_and_artificial_intelligence" title="Music and artificial intelligence">Music</a></li></ul></li> <li><a href="/wiki/Artificial_intelligence_in_government" title="Artificial intelligence in government">Government</a></li> <li><a href="/wiki/Artificial_intelligence_in_healthcare" title="Artificial intelligence in healthcare">Healthcare</a> <ul><li><a href="/wiki/Artificial_intelligence_in_mental_health" title="Artificial intelligence in mental health">Mental health</a></li></ul></li> <li><a href="/wiki/Artificial_intelligence_in_industry" title="Artificial intelligence in industry">Industry</a></li> <li><a href="/wiki/Machine_translation" title="Machine translation">Translation</a></li> <li><a href="/wiki/Artificial_intelligence_arms_race" title="Artificial intelligence arms race"> Military </a></li> <li><a 
href="/wiki/Machine_learning_in_physics" title="Machine learning in physics">Physics</a></li> <li><a href="/wiki/List_of_artificial_intelligence_projects" title="List of artificial intelligence projects">Projects</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Philosophy_of_artificial_intelligence" title="Philosophy of artificial intelligence">Philosophy</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Artificial_consciousness" title="Artificial consciousness">Artificial consciousness</a></li> <li><a href="/wiki/Chinese_room" title="Chinese room">Chinese room</a></li> <li><a href="/wiki/Friendly_artificial_intelligence" title="Friendly artificial intelligence">Friendly AI</a></li> <li><a href="/wiki/AI_control_problem" class="mw-redirect" title="AI control problem">Control problem</a>/<a href="/wiki/AI_takeover" title="AI takeover">Takeover</a></li> <li><a href="/wiki/Ethics_of_artificial_intelligence" title="Ethics of artificial intelligence">Ethics</a></li> <li><a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk</a></li> <li><a href="/wiki/Regulation_of_artificial_intelligence" title="Regulation of artificial intelligence">Regulation</a></li> <li><a href="/wiki/Turing_test" title="Turing test">Turing test</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/History_of_artificial_intelligence" title="History of artificial intelligence">History</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Timeline_of_artificial_intelligence" 
title="Timeline of artificial intelligence">Timeline</a></li> <li><a href="/wiki/Progress_in_artificial_intelligence" title="Progress in artificial intelligence">Progress</a></li> <li><a href="/wiki/AI_winter" title="AI winter">AI winter</a></li> <li><a href="/wiki/AI_boom" title="AI boom">AI boom</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)">Glossary</div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Glossary_of_artificial_intelligence" title="Glossary of artificial intelligence">Glossary</a></li></ul></div></div></td> </tr><tr><td class="sidebar-navbar"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><style data-mw-deduplicate="TemplateStyles:r1239400231">.mw-parser-output .navbar{display:inline;font-size:88%;font-weight:normal}.mw-parser-output .navbar-collapse{float:left;text-align:left}.mw-parser-output .navbar-boxtext{word-spacing:0}.mw-parser-output .navbar ul{display:inline-block;white-space:nowrap;line-height:inherit}.mw-parser-output .navbar-brackets::before{margin-right:-0.125em;content:"[ "}.mw-parser-output .navbar-brackets::after{margin-left:-0.125em;content:" ]"}.mw-parser-output .navbar li{word-spacing:-0.125em}.mw-parser-output .navbar a>span,.mw-parser-output .navbar a>abbr{text-decoration:inherit}.mw-parser-output .navbar-mini abbr{font-variant:small-caps;border-bottom:none;text-decoration:none;cursor:inherit}.mw-parser-output .navbar-ct-full{font-size:114%;margin:0 7em}.mw-parser-output .navbar-ct-mini{font-size:114%;margin:0 4em}html.skin-theme-clientpref-night .mw-parser-output .navbar li a abbr{color:var(--color-base)!important}@media(prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .navbar li a abbr{color:var(--color-base)!important}}@media print{.mw-parser-output 
.navbar{display:none!important}}</style><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Artificial_intelligence" title="Template:Artificial intelligence"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Artificial_intelligence" title="Template talk:Artificial intelligence"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Artificial_intelligence" title="Special:EditPage/Template:Artificial intelligence"><abbr title="Edit this template">e</abbr></a></li></ul></div></td></tr></tbody></table> <p><i><b>Deepfakes</b></i> (a <a href="/wiki/Portmanteau" class="mw-redirect" title="Portmanteau">portmanteau</a> of <span class="gloss-quot">'</span><span class="gloss-text"><a href="/wiki/Deep_learning" title="Deep learning">deep learning</a></span><span class="gloss-quot">'</span> and <span class="gloss-quot">'</span><span class="gloss-text">fake</span><span class="gloss-quot">'</span><sup id="cite_ref-FoxNews2018_1-0" class="reference"><a href="#cite_note-FoxNews2018-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup>) are images, videos, or audio which are edited or generated <a href="/wiki/Generative_artificial_intelligence" title="Generative artificial intelligence">using artificial intelligence</a> tools, and which may depict real or non-existent people. They are a type of <a href="/wiki/Synthetic_media" title="Synthetic media">synthetic media</a><sup id="cite_ref-2" class="reference"><a href="#cite_note-2"><span class="cite-bracket">&#91;</span>2<span class="cite-bracket">&#93;</span></a></sup> and a modern form of a <a href="/wiki/Media_prank" title="Media prank">media prank</a>.
</p><p>While the act of creating fake content is not new, deepfakes uniquely leverage the technological tools and techniques of <a href="/wiki/Machine_learning" title="Machine learning">machine learning</a> and <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a>,<sup id="cite_ref-3" class="reference"><a href="#cite_note-3"><span class="cite-bracket">&#91;</span>3<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Kietzmann-2020_4-0" class="reference"><a href="#cite_note-Kietzmann-2020-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Waldrop_5-0" class="reference"><a href="#cite_note-Waldrop-5"><span class="cite-bracket">&#91;</span>5<span class="cite-bracket">&#93;</span></a></sup> including <a href="/wiki/Facial_recognition_system" title="Facial recognition system">facial recognition</a> algorithms and artificial <a href="/wiki/Neural_network_(machine_learning)" title="Neural network (machine learning)">neural networks</a> such as <a href="/wiki/Variational_autoencoder" title="Variational autoencoder">variational autoencoders</a> (VAEs) and <a href="/wiki/Generative_adversarial_network" title="Generative adversarial network">generative adversarial networks</a> (GANs).<sup id="cite_ref-Kietzmann-2020_4-1" class="reference"><a href="#cite_note-Kietzmann-2020-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Schwartz_6-0" class="reference"><a href="#cite_note-Schwartz-6"><span class="cite-bracket">&#91;</span>6<span class="cite-bracket">&#93;</span></a></sup> In turn the field of image forensics develops techniques to <a href="/wiki/Artificial_intelligence_content_detection" title="Artificial intelligence content detection">detect manipulated images</a>.<sup id="cite_ref-Farid_7-0" class="reference"><a href="#cite_note-Farid-7"><span class="cite-bracket">&#91;</span>7<span 
class="cite-bracket">&#93;</span></a></sup> Deepfakes have garnered widespread attention for their potential use in creating <a href="/wiki/Child_sexual_abuse" title="Child sexual abuse">child sexual abuse</a> material, <a href="/wiki/Celebrity_sex_tape" title="Celebrity sex tape">celebrity pornographic videos</a>, <a href="/wiki/Revenge_porn" title="Revenge porn">revenge porn</a>, <a href="/wiki/Fake_news" title="Fake news">fake news</a>, <a href="/wiki/Hoax" title="Hoax">hoaxes</a>, <a href="/wiki/Bullying" title="Bullying">bullying</a>, and <a href="/wiki/Accounting_scandals" title="Accounting scandals">financial fraud</a>.<sup id="cite_ref-HighSnobiety2018_8-0" class="reference"><a href="#cite_note-HighSnobiety2018-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-9" class="reference"><a href="#cite_note-9"><span class="cite-bracket">&#91;</span>9<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-10" class="reference"><a href="#cite_note-10"><span class="cite-bracket">&#91;</span>10<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-11" class="reference"><a href="#cite_note-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup> </p><p>Academics have raised concerns about the potential for deep fakes to be used to promote disinformation and hate speech, and interfere with elections. The <a href="/wiki/Information_technology" title="Information technology">information technology</a> industry and governments have responded with recommendations to detect and limit their use. 
</p><p>From traditional <a href="/wiki/Entertainment" title="Entertainment">entertainment</a> to <a href="/wiki/Video_game" title="Video game">gaming</a>, deepfake technology has evolved to be increasingly convincing<sup id="cite_ref-12" class="reference"><a href="#cite_note-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup> and available to the public, allowing for the disruption of the entertainment and <a href="/wiki/Media_industry" class="mw-redirect" title="Media industry">media</a> industries.<sup id="cite_ref-13" class="reference"><a href="#cite_note-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> </p> <meta property="mw:PageProp/toc" /> <div class="mw-heading mw-heading2"><h2 id="History">History</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=1" title="Edit section: History"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:53271755473_-_Flickr_-_torstenbehrens.jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/3/30/53271755473_-_Flickr_-_torstenbehrens.jpg/220px-53271755473_-_Flickr_-_torstenbehrens.jpg" decoding="async" width="220" height="220" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/3/30/53271755473_-_Flickr_-_torstenbehrens.jpg/330px-53271755473_-_Flickr_-_torstenbehrens.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/3/30/53271755473_-_Flickr_-_torstenbehrens.jpg/440px-53271755473_-_Flickr_-_torstenbehrens.jpg 2x" data-file-width="2048" data-file-height="2048" /></a><figcaption>Portrait of actress <a href="/wiki/Sydney_Sweeney" title="Sydney Sweeney">Sydney Sweeney</a> generated by <a href="/wiki/Stable_Diffusion" title="Stable Diffusion">Stable Diffusion</a></figcaption></figure> 
<p><a href="/wiki/Photo_manipulation" class="mw-redirect" title="Photo manipulation">Photo manipulation</a> was developed in the 19th century and soon applied to motion pictures. Technology steadily improved during the 20th century, and more quickly with the advent of <a href="/wiki/Digital_video" title="Digital video">digital video</a>. </p><p>Deepfake technology has been developed by researchers at academic institutions beginning in the 1990s, and later by amateurs in online communities.<sup id="cite_ref-Harwell-2019_14-0" class="reference"><a href="#cite_note-Harwell-2019-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-15" class="reference"><a href="#cite_note-15"><span class="cite-bracket">&#91;</span>15<span class="cite-bracket">&#93;</span></a></sup> More recently the methods have been adopted by industry.<sup id="cite_ref-Porter-2019_16-0" class="reference"><a href="#cite_note-Porter-2019-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Academic_research">Academic research</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=2" title="Edit section: Academic research"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Academic research related to deepfakes is split between the field of <a href="/wiki/Computer_vision" title="Computer vision">computer vision</a>, a sub-field of computer science,<sup id="cite_ref-Harwell-2019_14-1" class="reference"><a href="#cite_note-Harwell-2019-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup> which develops techniques for creating and identifying deepfakes, and humanities and social science approaches that study the social, ethical and aesthetic implications of deepfakes. 
</p> <div class="mw-heading mw-heading4"><h4 id="Social_science_and_humanities_approaches_to_deepfakes">Social science and humanities approaches to deepfakes</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=3" title="Edit section: Social science and humanities approaches to deepfakes"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In cinema studies, deepfakes demonstrate how "the human face is emerging as a central object of ambivalence in the digital age".<sup id="cite_ref-17" class="reference"><a href="#cite_note-17"><span class="cite-bracket">&#91;</span>17<span class="cite-bracket">&#93;</span></a></sup> Video artists have used deepfakes to "playfully rewrite film history by retrofitting canonical cinema with new star performers".<sup id="cite_ref-Holliday-2021_18-0" class="reference"><a href="#cite_note-Holliday-2021-18"><span class="cite-bracket">&#91;</span>18<span class="cite-bracket">&#93;</span></a></sup> Film scholar Christopher Holliday analyses how switching out the gender and race of performers in familiar movie scenes destabilizes gender classifications and categories.<sup id="cite_ref-Holliday-2021_18-1" class="reference"><a href="#cite_note-Holliday-2021-18"><span class="cite-bracket">&#91;</span>18<span class="cite-bracket">&#93;</span></a></sup> The idea of "<a href="/wiki/Queering" title="Queering">queering</a>" deepfakes is also discussed in Oliver M. Gingrich's discussion of media artworks that use deepfakes to reframe gender,<sup id="cite_ref-19" class="reference"><a href="#cite_note-19"><span class="cite-bracket">&#91;</span>19<span class="cite-bracket">&#93;</span></a></sup> including British artist <a href="/wiki/Jake_Elwes" title="Jake Elwes">Jake Elwes'</a> <i>Zizi: Queering the Dataset</i>, an artwork that uses deepfakes of drag queens to intentionally play with gender. 
The aesthetic potentials of deepfakes are also beginning to be explored. Theatre historian John Fletcher notes that early demonstrations of deepfakes are presented as performances, and situates these in the context of theatre, discussing "some of the more troubling paradigm shifts" that deepfakes represent as a performance genre.<sup id="cite_ref-20" class="reference"><a href="#cite_note-20"><span class="cite-bracket">&#91;</span>20<span class="cite-bracket">&#93;</span></a></sup> </p><p>Philosophers and media scholars have discussed the ethics of deepfakes, especially in relation to pornography.<sup id="cite_ref-21" class="reference"><a href="#cite_note-21"><span class="cite-bracket">&#91;</span>21<span class="cite-bracket">&#93;</span></a></sup> Media scholar Emily van der Nagel draws upon research in photography studies on manipulated images to discuss verification systems that allow women to consent to uses of their images.<sup id="cite_ref-22" class="reference"><a href="#cite_note-22"><span class="cite-bracket">&#91;</span>22<span class="cite-bracket">&#93;</span></a></sup> </p><p>Beyond pornography, deepfakes have been framed by philosophers as an "epistemic threat" to knowledge and thus to society.<sup id="cite_ref-23" class="reference"><a href="#cite_note-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> There are several other suggestions for how to deal with the risks deepfakes give rise to, not only in pornography but also to corporations, politicians and others, of "exploitation, intimidation, and personal sabotage",<sup id="cite_ref-24" class="reference"><a href="#cite_note-24"><span class="cite-bracket">&#91;</span>24<span class="cite-bracket">&#93;</span></a></sup> and there are several scholarly discussions of potential legal and regulatory responses both in legal studies and media studies.<sup id="cite_ref-25" class="reference"><a href="#cite_note-25"><span class="cite-bracket">&#91;</span>25<span
class="cite-bracket">&#93;</span></a></sup> In psychology and media studies, scholars discuss the effects of <a href="/wiki/Disinformation" title="Disinformation">disinformation</a> that uses deepfakes,<sup id="cite_ref-26" class="reference"><a href="#cite_note-26"><span class="cite-bracket">&#91;</span>26<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-27" class="reference"><a href="#cite_note-27"><span class="cite-bracket">&#91;</span>27<span class="cite-bracket">&#93;</span></a></sup> and the social impact of deepfakes.<sup id="cite_ref-28" class="reference"><a href="#cite_note-28"><span class="cite-bracket">&#91;</span>28<span class="cite-bracket">&#93;</span></a></sup> </p><p>While most English-language academic studies of deepfakes focus on the Western anxieties about disinformation and pornography, digital anthropologist Gabriele de Seta has analyzed the Chinese reception of deepfakes, which are known as <i>huanlian</i>, which translates to "changing faces". The Chinese term does not contain the "fake" of the English deepfake, and de Seta argues that this cultural context may explain why the Chinese response has been more about practical regulatory responses to "fraud risks, image rights, economic profit, and ethical imbalances".<sup id="cite_ref-29" class="reference"><a href="#cite_note-29"><span class="cite-bracket">&#91;</span>29<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Computer_science_research_on_deepfakes">Computer science research on deepfakes</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=4" title="Edit section: Computer science research on deepfakes"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>An early landmark project was the Video Rewrite program, published in 1997. 
The program modified existing video footage of a person speaking to depict that person mouthing the words contained in a different audio track.<sup id="cite_ref-Bregler-1997_30-0" class="reference"><a href="#cite_note-Bregler-1997-30"><span class="cite-bracket">&#91;</span>30<span class="cite-bracket">&#93;</span></a></sup> It was the first system to fully automate this kind of facial reanimation, and it did so using machine learning techniques to make connections between the sounds produced by a video's subject and the shape of the subject's face.<sup id="cite_ref-Bregler-1997_30-1" class="reference"><a href="#cite_note-Bregler-1997-30"><span class="cite-bracket">&#91;</span>30<span class="cite-bracket">&#93;</span></a></sup> </p><p>Contemporary academic projects have focused on creating more realistic videos and on improving techniques.<sup id="cite_ref-Suwajanakorn-2017_31-0" class="reference"><a href="#cite_note-Suwajanakorn-2017-31"><span class="cite-bracket">&#91;</span>31<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Thies-2016_32-0" class="reference"><a href="#cite_note-Thies-2016-32"><span class="cite-bracket">&#91;</span>32<span class="cite-bracket">&#93;</span></a></sup> The "Synthesizing Obama" program, published in 2017, modifies video footage of former president <a href="/wiki/Barack_Obama" title="Barack Obama">Barack Obama</a> to depict him mouthing the words contained in a separate audio track.<sup id="cite_ref-Suwajanakorn-2017_31-1" class="reference"><a href="#cite_note-Suwajanakorn-2017-31"><span class="cite-bracket">&#91;</span>31<span class="cite-bracket">&#93;</span></a></sup> The project lists as a main research contribution its <a href="/wiki/Photorealistic" class="mw-redirect" title="Photorealistic">photorealistic</a> technique for synthesizing mouth shapes from audio.<sup id="cite_ref-Suwajanakorn-2017_31-2" class="reference"><a href="#cite_note-Suwajanakorn-2017-31"><span class="cite-bracket">&#91;</span>31<span 
class="cite-bracket">&#93;</span></a></sup> The Face2Face program, published in 2016, modifies video footage of a person's face to depict them mimicking the facial expressions of another person in real time.<sup id="cite_ref-Thies-2016_32-1" class="reference"><a href="#cite_note-Thies-2016-32"><span class="cite-bracket">&#91;</span>32<span class="cite-bracket">&#93;</span></a></sup> The project lists as a main research contribution the first method for re-enacting facial expressions in real time using a camera that does not capture depth, making it possible for the technique to be performed using common consumer cameras.<sup id="cite_ref-Thies-2016_32-2" class="reference"><a href="#cite_note-Thies-2016-32"><span class="cite-bracket">&#91;</span>32<span class="cite-bracket">&#93;</span></a></sup> </p><p>In August 2018, researchers at the <a href="/wiki/University_of_California,_Berkeley" title="University of California, Berkeley">University of California, Berkeley</a> published a paper introducing a fake dancing app that can create the impression of masterful dancing ability using AI.<sup id="cite_ref-The_Verge-2019_33-0" class="reference"><a href="#cite_note-The_Verge-2019-33"><span class="cite-bracket">&#91;</span>33<span class="cite-bracket">&#93;</span></a></sup> This project expands the application of deepfakes to the entire body; previous works focused on the head or parts of the face.<sup id="cite_ref-Farquhar-2018_34-0" class="reference"><a href="#cite_note-Farquhar-2018-34"><span class="cite-bracket">&#91;</span>34<span class="cite-bracket">&#93;</span></a></sup> </p><p>Researchers have also shown that deepfakes are expanding into other domains such as tampering with medical imagery.<sup id="cite_ref-35" class="reference"><a href="#cite_note-35"><span class="cite-bracket">&#91;</span>35<span class="cite-bracket">&#93;</span></a></sup> In this work, it was shown how an attacker can automatically inject or remove lung cancer in a patient's <a 
href="/wiki/Optical_coherence_tomography" title="Optical coherence tomography">3D CT scan</a>. The result was so convincing that it fooled three radiologists and a state-of-the-art lung cancer detection AI. To demonstrate the threat, the authors successfully performed the attack on a hospital in a <a href="/wiki/White_hat_(computer_security)" title="White hat (computer security)">White hat penetration test</a>.<sup id="cite_ref-36" class="reference"><a href="#cite_note-36"><span class="cite-bracket">&#91;</span>36<span class="cite-bracket">&#93;</span></a></sup> </p><p>A survey of deepfakes, published in May 2020, provides a timeline of how the creation and detection of deepfakes have advanced over the last few years.<sup id="cite_ref-37" class="reference"><a href="#cite_note-37"><span class="cite-bracket">&#91;</span>37<span class="cite-bracket">&#93;</span></a></sup> The survey identifies that researchers have been focusing on resolving the following challenges of deepfake creation: </p> <ul><li>Generalization. High-quality deepfakes are often achieved by training on hours of footage of the target. This challenge is to minimize the amount of training data and the time to train the model required to produce quality images and to enable the execution of trained models on <i>new</i> identities (unseen during training).</li> <li>Paired Training. Training a supervised model can produce high-quality results, but requires data pairing. This is the process of finding examples of inputs and their desired outputs for the model to learn from. Data pairing is laborious and impractical when training on multiple identities and facial behaviors. Some solutions include self-supervised training (using frames from the same video), the use of unpaired networks such as Cycle-GAN, or the manipulation of network embeddings.</li> <li>Identity leakage. 
This is where the identity of the driver (i.e., the actor controlling the face in a reenactment) is partially transferred to the generated face. Some solutions proposed include attention mechanisms, few-shot learning, disentanglement, boundary conversions, and skip connections.</li> <li>Occlusions. When part of the face is obstructed with a hand, hair, glasses, or any other item then artifacts can occur. A common occlusion is a closed mouth which hides the inside of the mouth and the teeth. Some solutions include image segmentation during training and in-painting.</li> <li>Temporal coherence. In videos containing deepfakes, artifacts such as flickering and jitter can occur because the network has no context of the preceding frames. Some researchers provide this context or use novel temporal coherence losses to help improve realism. As the technology improves, the interference is diminishing.</li></ul> <p>Overall, deepfakes are expected to have several implications in media and society, media production, media representations, media audiences, gender, law, and regulation, and politics.<sup id="cite_ref-38" class="reference"><a href="#cite_note-38"><span class="cite-bracket">&#91;</span>38<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Amateur_development">Amateur development</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=5" title="Edit section: Amateur development"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The term deepfakes originated around the end of 2017 from a <a href="/wiki/Reddit" title="Reddit">Reddit</a> user named "deepfakes".<sup id="cite_ref-Cole-2018b_39-0" class="reference"><a href="#cite_note-Cole-2018b-39"><span class="cite-bracket">&#91;</span>39<span class="cite-bracket">&#93;</span></a></sup> He, as well as others in the Reddit community r/deepfakes, shared 
deepfakes they created; many videos involved celebrities' faces swapped onto the bodies of actors in pornographic videos,<sup id="cite_ref-Cole-2018b_39-1" class="reference"><a href="#cite_note-Cole-2018b-39"><span class="cite-bracket">&#91;</span>39<span class="cite-bracket">&#93;</span></a></sup> while non-pornographic content included many videos with actor <a href="/wiki/Nicolas_Cage" title="Nicolas Cage">Nicolas Cage</a>'s face swapped into various movies.<sup id="cite_ref-40" class="reference"><a href="#cite_note-40"><span class="cite-bracket">&#91;</span>40<span class="cite-bracket">&#93;</span></a></sup> </p><p>Other online communities remain, including Reddit communities that do not share pornography, such as r/SFWdeepfakes (short for "safe for work deepfakes"), in which community members share deepfakes depicting celebrities, politicians, and others in non-pornographic scenarios.<sup id="cite_ref-41" class="reference"><a href="#cite_note-41"><span class="cite-bracket">&#91;</span>41<span class="cite-bracket">&#93;</span></a></sup> Other online communities continue to share pornography on platforms that have not banned deepfake pornography.<sup id="cite_ref-Hathaway-2018_42-0" class="reference"><a href="#cite_note-Hathaway-2018-42"><span class="cite-bracket">&#91;</span>42<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Commercial_development">Commercial development</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=6" title="Edit section: Commercial development"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In January 2018, a proprietary desktop application called FakeApp was launched.<sup id="cite_ref-43" class="reference"><a href="#cite_note-43"><span class="cite-bracket">&#91;</span>43<span class="cite-bracket">&#93;</span></a></sup> This app allows users to easily create and 
share videos with their faces swapped with each other.<sup id="cite_ref-44" class="reference"><a href="#cite_note-44"><span class="cite-bracket">&#91;</span>44<span class="cite-bracket">&#93;</span></a></sup> As of 2019, FakeApp has been superseded by open-source alternatives such as Faceswap, command line-based DeepFaceLab, and web-based apps such as DeepfakesWeb.com <sup id="cite_ref-45" class="reference"><a href="#cite_note-45"><span class="cite-bracket">&#91;</span>45<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Faceswap-2019_46-0" class="reference"><a href="#cite_note-Faceswap-2019-46"><span class="cite-bracket">&#91;</span>46<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Github-2022_47-0" class="reference"><a href="#cite_note-Github-2022-47"><span class="cite-bracket">&#91;</span>47<span class="cite-bracket">&#93;</span></a></sup> </p><p>Larger companies started to use deepfakes.<sup id="cite_ref-Porter-2019_16-1" class="reference"><a href="#cite_note-Porter-2019-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup> Corporate training videos can be created using deepfaked avatars and their voices, for example <a href="/wiki/Synthesia_(company)" title="Synthesia (company)">Synthesia</a>, which uses deepfake technology with avatars to create personalized videos.<sup id="cite_ref-48" class="reference"><a href="#cite_note-48"><span class="cite-bracket">&#91;</span>48<span class="cite-bracket">&#93;</span></a></sup> The mobile app <a href="/wiki/Momo_(software)" title="Momo (software)">Momo</a> created the application Zao which allows users to superimpose their face on television and movie clips with a single picture.<sup id="cite_ref-Porter-2019_16-2" class="reference"><a href="#cite_note-Porter-2019-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup> As of 2019 the Japanese AI company DataGrid made a full body deepfake that could create a 
person from scratch.<sup id="cite_ref-49" class="reference"><a href="#cite_note-49"><span class="cite-bracket">&#91;</span>49<span class="cite-bracket">&#93;</span></a></sup> </p><p>As of 2020 <a href="/wiki/Audio_deepfake" title="Audio deepfake">audio deepfakes</a>, and AI software capable of detecting deepfakes and <a href="/wiki/Digital_cloning" title="Digital cloning">cloning human voices</a> after 5 seconds of listening time also exist.<sup id="cite_ref-50" class="reference"><a href="#cite_note-50"><span class="cite-bracket">&#91;</span>50<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-51" class="reference"><a href="#cite_note-51"><span class="cite-bracket">&#91;</span>51<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-52" class="reference"><a href="#cite_note-52"><span class="cite-bracket">&#91;</span>52<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-53" class="reference"><a href="#cite_note-53"><span class="cite-bracket">&#91;</span>53<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-54" class="reference"><a href="#cite_note-54"><span class="cite-bracket">&#91;</span>54<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-55" class="reference"><a href="#cite_note-55"><span class="cite-bracket">&#91;</span>55<span class="cite-bracket">&#93;</span></a></sup> A mobile deepfake app, Impressions, was launched in March 2020. 
It was the first app for the creation of celebrity deepfake videos from mobile phones.<sup id="cite_ref-56" class="reference"><a href="#cite_note-56"><span class="cite-bracket">&#91;</span>56<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-57" class="reference"><a href="#cite_note-57"><span class="cite-bracket">&#91;</span>57<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Resurrection">Resurrection</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=7" title="Edit section: Resurrection"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfake technology's ability to fabricate messages and actions of others can include deceased individuals. On 29 October 2020, <a href="/wiki/Kim_Kardashian" title="Kim Kardashian">Kim Kardashian</a> posted a video featuring a <a href="/wiki/Holography" title="Holography">hologram</a> of her late father <a href="/wiki/Robert_Kardashian" title="Robert Kardashian">Robert Kardashian</a> created by the company Kaleida, which used a combination of performance, motion tracking, SFX, VFX and <a href="/wiki/Deepfake_pornography" title="Deepfake pornography">DeepFake</a> technologies to create the illusion.<sup id="cite_ref-58" class="reference"><a href="#cite_note-58"><span class="cite-bracket">&#91;</span>58<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-59" class="reference"><a href="#cite_note-59"><span class="cite-bracket">&#91;</span>59<span class="cite-bracket">&#93;</span></a></sup> </p><p>In 2020, a deepfake video of Joaquin Oliver, a victim of the <a href="/wiki/Stoneman_Douglas_High_School_shooting" class="mw-redirect" title="Stoneman Douglas High School shooting">Parkland shooting</a> was created as part of a gun safety campaign. 
Oliver's parents partnered with nonprofit Change the Ref and McCann Health to produce a video in which Oliver appears to encourage people to support gun safety legislation and the politicians who back it.<sup id="cite_ref-60" class="reference"><a href="#cite_note-60"><span class="cite-bracket">&#91;</span>60<span class="cite-bracket">&#93;</span></a></sup> </p><p>In 2022, a deepfake video of <a href="/wiki/Elvis_Presley" title="Elvis Presley">Elvis Presley</a> was used on the program <i><a href="/wiki/America%27s_Got_Talent_season_17" title="America&#39;s Got Talent season 17">America's Got Talent 17</a></i>.<sup id="cite_ref-61" class="reference"><a href="#cite_note-61"><span class="cite-bracket">&#91;</span>61<span class="cite-bracket">&#93;</span></a></sup> </p><p>A TV commercial used a deepfake video of <a href="/wiki/The_Beatles" title="The Beatles">Beatles</a> member <a href="/wiki/John_Lennon" title="John Lennon">John Lennon</a>, who was murdered in 1980.<sup id="cite_ref-62" class="reference"><a href="#cite_note-62"><span class="cite-bracket">&#91;</span>62<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Techniques">Techniques</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=8" title="Edit section: Techniques"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfakes rely on a type of <a href="/wiki/Artificial_neural_network" class="mw-redirect" title="Artificial neural network">neural network</a> called an <a href="/wiki/Autoencoder" title="Autoencoder">autoencoder</a>.<sup id="cite_ref-63" class="reference"><a href="#cite_note-63"><span class="cite-bracket">&#91;</span>63<span class="cite-bracket">&#93;</span></a></sup> These consist of an encoder, which reduces an image to a lower dimensional <a href="/wiki/Latent_space" title="Latent space">latent space</a>, and a decoder, which 
reconstructs the image from the latent representation.<sup id="cite_ref-64" class="reference"><a href="#cite_note-64"><span class="cite-bracket">&#91;</span>64<span class="cite-bracket">&#93;</span></a></sup> Deepfakes utilize this architecture by having a universal encoder which encodes a person into the latent space.<sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. (June 2024)">citation needed</span></a></i>&#93;</sup> The latent representation contains key information about the person's facial features and body posture. This can then be decoded with a model trained specifically for the target. This means the target's detailed information will be superimposed on the underlying facial and body features of the original video, represented in the latent space.<sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. (June 2024)">citation needed</span></a></i>&#93;</sup> </p><p>A popular upgrade to this architecture attaches a <a href="/wiki/Generative_adversarial_network" title="Generative adversarial network">generative adversarial network</a> to the decoder. A <a href="/wiki/General_adversarial_network" class="mw-redirect" title="General adversarial network">GAN</a> trains a generator, in this case the decoder, and a discriminator in an adversarial relationship. 
The generator creates new images from the latent representation of the source material, while the discriminator attempts to determine whether or not the image is generated.<sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. (June 2024)">citation needed</span></a></i>&#93;</sup> This causes the generator to create images that mimic reality extremely well as any defects would be caught by the discriminator.<sup id="cite_ref-Wired-1059_65-0" class="reference"><a href="#cite_note-Wired-1059-65"><span class="cite-bracket">&#91;</span>65<span class="cite-bracket">&#93;</span></a></sup> Both algorithms improve constantly in a <a href="/wiki/Zero-sum_game" title="Zero-sum game">zero sum game</a>. This makes deepfakes difficult to combat as they are constantly evolving; any time a defect is determined, it can be corrected.<sup id="cite_ref-Wired-1059_65-1" class="reference"><a href="#cite_note-Wired-1059-65"><span class="cite-bracket">&#91;</span>65<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Applications">Applications</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=9" title="Edit section: Applications"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="mw-heading mw-heading3"><h3 id="Acting">Acting</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=10" title="Edit section: Acting"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><a href="/wiki/Digital_clone" class="mw-redirect" title="Digital clone">Digital clones</a> of professional actors have appeared in <a href="/wiki/Film" 
title="Film">films</a> before, and progress in deepfake technology is expected to further the accessibility and effectiveness of such clones.<sup id="cite_ref-66" class="reference"><a href="#cite_note-66"><span class="cite-bracket">&#91;</span>66<span class="cite-bracket">&#93;</span></a></sup> The use of AI technology was a major issue in the <a href="/wiki/2023_SAG-AFTRA_strike" title="2023 SAG-AFTRA strike">2023 SAG-AFTRA strike</a>, as new techniques enabled the capability of generating and storing a digital likeness to use in place of actors.<sup id="cite_ref-67" class="reference"><a href="#cite_note-67"><span class="cite-bracket">&#91;</span>67<span class="cite-bracket">&#93;</span></a></sup> </p><p><a href="/wiki/Disney" class="mw-redirect" title="Disney">Disney</a> has improved their visual effects using high-resolution deepfake face swapping technology.<sup id="cite_ref-High-2020_68-0" class="reference"><a href="#cite_note-High-2020-68"><span class="cite-bracket">&#91;</span>68<span class="cite-bracket">&#93;</span></a></sup> Disney improved their technology through progressive training programmed to identify facial expressions, implementing a face-swapping feature, and iterating in order to stabilize and refine the output.<sup id="cite_ref-High-2020_68-1" class="reference"><a href="#cite_note-High-2020-68"><span class="cite-bracket">&#91;</span>68<span class="cite-bracket">&#93;</span></a></sup> This high-resolution deepfake technology saves significant operational and production costs.<sup id="cite_ref-Naruniec-2020_69-0" class="reference"><a href="#cite_note-Naruniec-2020-69"><span class="cite-bracket">&#91;</span>69<span class="cite-bracket">&#93;</span></a></sup> Disney's deepfake generation model can produce AI-generated media at a 1024 x 1024 resolution, as opposed to common models that produce media at a 256 x 256 resolution.<sup id="cite_ref-Naruniec-2020_69-1" class="reference"><a href="#cite_note-Naruniec-2020-69"><span 
class="cite-bracket">&#91;</span>69<span class="cite-bracket">&#93;</span></a></sup> The technology allows Disney to <span class="nowrap"><a href="/wiki/De-aging_in_motion_pictures" class="mw-redirect" title="De-aging in motion pictures">de-age</a></span> characters or revive deceased actors.<sup id="cite_ref-70" class="reference"><a href="#cite_note-70"><span class="cite-bracket">&#91;</span>70<span class="cite-bracket">&#93;</span></a></sup> Similar technology was initially used by fans to unofficially insert faces into existing media, such as overlaying <a href="/wiki/Harrison_Ford" title="Harrison Ford">Harrison Ford</a>'s young face onto Han Solo's face in <i><a href="/wiki/Solo:_A_Star_Wars_Story" title="Solo: A Star Wars Story">Solo: A Star Wars Story</a></i>.<sup id="cite_ref-71" class="reference"><a href="#cite_note-71"><span class="cite-bracket">&#91;</span>71<span class="cite-bracket">&#93;</span></a></sup> Disney used deepfakes for the characters of Princess Leia and <a href="/wiki/Grand_Moff_Tarkin" title="Grand Moff Tarkin">Grand Moff Tarkin</a> in <i><a href="/wiki/Rogue_One" title="Rogue One">Rogue One</a>.</i><sup id="cite_ref-72" class="reference"><a href="#cite_note-72"><span class="cite-bracket">&#91;</span>72<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-73" class="reference"><a href="#cite_note-73"><span class="cite-bracket">&#91;</span>73<span class="cite-bracket">&#93;</span></a></sup> </p><p>The 2020 documentary <i><a href="/wiki/Welcome_to_Chechnya" title="Welcome to Chechnya">Welcome to Chechnya</a></i> used deepfake technology to obscure the identity of the people interviewed, so as to protect them from retaliation.<sup id="cite_ref-74" class="reference"><a href="#cite_note-74"><span class="cite-bracket">&#91;</span>74<span class="cite-bracket">&#93;</span></a></sup> </p><p><a href="/wiki/Creative_Artists_Agency" title="Creative Artists Agency">Creative Artists Agency</a> has developed a facility to capture the 
likeness of an actor "in a single day", to develop a digital clone of the actor, which would be controlled by the actor or their estate alongside other <a href="/wiki/Personality_rights" title="Personality rights">personality rights</a>.<sup id="cite_ref-75" class="reference"><a href="#cite_note-75"><span class="cite-bracket">&#91;</span>75<span class="cite-bracket">&#93;</span></a></sup> </p><p>Companies which have used digital clones of professional actors in advertisements include <a href="/wiki/Puma_(brand)" title="Puma (brand)">Puma</a>, <a href="/wiki/Nike,_Inc." title="Nike, Inc.">Nike</a> and <a href="/wiki/Procter_%26_Gamble" title="Procter &amp; Gamble">Procter &amp; Gamble</a>.<sup id="cite_ref-76" class="reference"><a href="#cite_note-76"><span class="cite-bracket">&#91;</span>76<span class="cite-bracket">&#93;</span></a></sup> </p><p>Deepfake technology allowed David Beckham to appear in a campaign in nearly nine languages to raise awareness of the fight against malaria.<sup id="cite_ref-77" class="reference"><a href="#cite_note-77"><span class="cite-bracket">&#91;</span>77<span class="cite-bracket">&#93;</span></a></sup> </p><p>In the 2024 Indian <a href="/wiki/Tamil_language" title="Tamil language">Tamil</a> <a href="/wiki/Science_fiction_film" title="Science fiction film">science fiction</a> <a href="/wiki/Action_film" title="Action film">action thriller</a> <a href="/wiki/The_Greatest_of_All_Time" title="The Greatest of All Time"><i>The Greatest of All Time</i></a>, the teenage version of <a href="/wiki/Vijay_(actor)" title="Vijay (actor)">Vijay</a>'s character Jeevan is portrayed by Ayaz Khan. 
Vijay's teenage face was then attained by <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">AI</a> deepfake.<sup id="cite_ref-78" class="reference"><a href="#cite_note-78"><span class="cite-bracket">&#91;</span>78<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Art">Art</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=11" title="Edit section: Art"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In March 2018 the multidisciplinary artist Joseph Ayerle published the <a href="/wiki/Video_art" title="Video art">video artwork</a> <i>Un'emozione per sempre 2.0</i> (English title: <i>The Italian Game</i>). The artist worked with Deepfake technology to create an <i>AI actor,</i> a synthetic version of 80s movie star <a href="/wiki/Ornella_Muti" title="Ornella Muti">Ornella Muti</a>, traveling in time from 1978 to 2018. Deepfakes are also being used in education and media to create realistic videos and interactive content, which offer new ways to engage audiences. However, they also bring risks, especially for spreading false information, which has led to calls for responsible use and clear rules. 
The <a href="/wiki/Massachusetts_Institute_of_Technology" title="Massachusetts Institute of Technology">Massachusetts Institute of Technology</a> referred to this artwork in the study "Collective Wisdom".<sup id="cite_ref-79" class="reference"><a href="#cite_note-79"><span class="cite-bracket">&#91;</span>79<span class="cite-bracket">&#93;</span></a></sup> The artist used Ornella Muti's <a href="/wiki/Time_travel" title="Time travel">time travel</a> to explore generational reflections, while also investigating questions about the role of provocation in the world of art.<sup id="cite_ref-80" class="reference"><a href="#cite_note-80"><span class="cite-bracket">&#91;</span>80<span class="cite-bracket">&#93;</span></a></sup> For the technical realization Ayerle used scenes of photo model <a href="/wiki/Kendall_Jenner" title="Kendall Jenner">Kendall Jenner</a>. The program replaced Jenner's face with an AI-calculated face of Ornella Muti. As a result, the AI actor has the face of the Italian actor Ornella Muti and the body of Kendall Jenner. </p><p>Deepfakes have been widely used in <a href="/wiki/Satire" title="Satire">satire</a> or to parody celebrities and politicians. 
The 2020 webseries <i><a href="/wiki/Sassy_Justice" title="Sassy Justice">Sassy Justice</a></i>, created by <a href="/wiki/Trey_Parker" title="Trey Parker">Trey Parker</a> and <a href="/wiki/Matt_Stone" title="Matt Stone">Matt Stone</a>, heavily features the use of deepfaked public figures to satirize current events and raise awareness of deepfake technology.<sup id="cite_ref-81" class="reference"><a href="#cite_note-81"><span class="cite-bracket">&#91;</span>81<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Blackmail">Blackmail</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=12" title="Edit section: Blackmail"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfakes can be used to generate blackmail materials that falsely incriminate a victim. A report by the American <a href="/wiki/Congressional_Research_Service" title="Congressional Research Service">Congressional Research Service</a> warned that deepfakes could be used to blackmail elected officials or those with access to <a href="/wiki/Classified_information" title="Classified information">classified information</a> for <a href="/wiki/Espionage" title="Espionage">espionage</a> or <a href="/wiki/Foreign_electoral_intervention" title="Foreign electoral intervention">influence</a> purposes.<sup id="cite_ref-CRS1_82-0" class="reference"><a href="#cite_note-CRS1-82"><span class="cite-bracket">&#91;</span>82<span class="cite-bracket">&#93;</span></a></sup> </p><p>Alternatively, since the fakes cannot reliably be distinguished from genuine materials, victims of actual blackmail can now claim that the true artifacts are fakes, granting them plausible deniability. The effect is to void credibility of existing blackmail materials, which erases loyalty to blackmailers and destroys the blackmailer's control. 
This phenomenon can be termed "blackmail inflation", since it "devalues" real blackmail, rendering it worthless.<sup id="cite_ref-83" class="reference"><a href="#cite_note-83"><span class="cite-bracket">&#91;</span>83<span class="cite-bracket">&#93;</span></a></sup> It is possible to utilize commodity GPU hardware with a small software program to generate this blackmail content for any number of subjects in huge quantities, driving up the supply of fake blackmail content limitlessly and in highly scalable fashion.<sup id="cite_ref-84" class="reference"><a href="#cite_note-84"><span class="cite-bracket">&#91;</span>84<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Entertainment">Entertainment</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=13" title="Edit section: Entertainment"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>On June 8, 2022,<sup id="cite_ref-85" class="reference"><a href="#cite_note-85"><span class="cite-bracket">&#91;</span>85<span class="cite-bracket">&#93;</span></a></sup> Daniel Emmet, a former <a href="/wiki/America%27s_Got_Talent" title="America&#39;s Got Talent">AGT</a> contestant, teamed up with the <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">AI</a> <a href="/wiki/Startup_company" title="Startup company">startup</a><sup id="cite_ref-86" class="reference"><a href="#cite_note-86"><span class="cite-bracket">&#91;</span>86<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-87" class="reference"><a href="#cite_note-87"><span class="cite-bracket">&#91;</span>87<span class="cite-bracket">&#93;</span></a></sup> <a href="/w/index.php?title=Metaphysic_AI&amp;action=edit&amp;redlink=1" class="new" title="Metaphysic AI (page does not exist)">Metaphysic AI</a>, to create a hyperrealistic deepfake to make it appear as <a 
href="/wiki/Simon_Cowell" title="Simon Cowell">Simon Cowell</a>. Cowell, notoriously known for severely critiquing contestants,<sup id="cite_ref-88" class="reference"><a href="#cite_note-88"><span class="cite-bracket">&#91;</span>88<span class="cite-bracket">&#93;</span></a></sup> was on stage interpreting "<a href="/wiki/You%27re_the_Inspiration" title="You&#39;re the Inspiration">You're The Inspiration</a>" by <a href="/wiki/Chicago_(band)" title="Chicago (band)">Chicago</a>. Emmet sang on stage as an image of Simon Cowell emerged on the screen behind him in flawless synchronicity.<sup id="cite_ref-89" class="reference"><a href="#cite_note-89"><span class="cite-bracket">&#91;</span>89<span class="cite-bracket">&#93;</span></a></sup> </p><p>On August 30, 2022, Metaphysic AI had 'deep-fake' <a href="/wiki/Simon_Cowell" title="Simon Cowell">Simon Cowell</a>, <a href="/wiki/Howie_Mandel" title="Howie Mandel">Howie Mandel</a> and <a href="/wiki/Terry_Crews" title="Terry Crews">Terry Crews</a> singing <a href="/wiki/Opera" title="Opera">opera</a> on stage.<sup id="cite_ref-90" class="reference"><a href="#cite_note-90"><span class="cite-bracket">&#91;</span>90<span class="cite-bracket">&#93;</span></a></sup> </p><p>On September 13, 2022, Metaphysic AI performed with a <a href="/wiki/Synthetic_media" title="Synthetic media">synthetic</a> version of <a href="/wiki/Elvis_Presley" title="Elvis Presley">Elvis Presley</a> for the finals of <i>America's Got Talent</i>.<sup id="cite_ref-91" class="reference"><a href="#cite_note-91"><span class="cite-bracket">&#91;</span>91<span class="cite-bracket">&#93;</span></a></sup> </p><p>The <a href="/wiki/MIT" class="mw-redirect" title="MIT">MIT</a> artificial intelligence project <a href="/wiki/15.ai" title="15.ai">15.ai</a> has been used for content creation for multiple Internet <a href="/wiki/Fandoms" class="mw-redirect" title="Fandoms">fandoms</a>, particularly on social media.<sup id="cite_ref-kotaku_92-0" class="reference"><a 
href="#cite_note-kotaku-92"><span class="cite-bracket">&#91;</span>92<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-gameinformer_93-0" class="reference"><a href="#cite_note-gameinformer-93"><span class="cite-bracket">&#91;</span>93<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-pcgamer_94-0" class="reference"><a href="#cite_note-pcgamer-94"><span class="cite-bracket">&#91;</span>94<span class="cite-bracket">&#93;</span></a></sup> </p><p>In 2023 the bands <a href="/wiki/ABBA_Voyage" title="ABBA Voyage">ABBA</a> and <a href="/wiki/Kiss_(band)" title="Kiss (band)">KISS</a> partnered with <a href="/wiki/Industrial_Light_%26_Magic" title="Industrial Light &amp; Magic">Industrial Light &amp; Magic</a> and <a href="/wiki/Pophouse_Entertainment" title="Pophouse Entertainment">Pophouse Entertainment</a> to develop deepfake avatars capable of performing <a href="/wiki/Virtual_concert" title="Virtual concert">virtual concerts</a>.<sup id="cite_ref-95" class="reference"><a href="#cite_note-95"><span class="cite-bracket">&#91;</span>95<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Fraud_and_scams">Fraud and scams</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=14" title="Edit section: Fraud and scams"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Fraudsters and scammers make use of deepfakes to trick people into fake investment schemes, <a href="/wiki/Financial_fraud" class="mw-redirect" title="Financial fraud">financial fraud</a>, <a href="/wiki/Cryptocurrencies" class="mw-redirect" title="Cryptocurrencies">cryptocurrencies</a>, <a href="/wiki/Wire_transfer" title="Wire transfer">sending money</a>, and following <a href="/wiki/Endorsement_(advertising)" class="mw-redirect" title="Endorsement (advertising)">endorsements</a>. 
The likenesses of celebrities and politicians have been used for large-scale scams, as well as those of private individuals, which are used in <a href="/wiki/Spearphish" class="mw-redirect" title="Spearphish">spearphishing</a> attacks. According to the <a href="/wiki/Better_Business_Bureau" title="Better Business Bureau">Better Business Bureau</a>, deepfake scams are becoming more prevalent.<sup id="cite_ref-:0_96-0" class="reference"><a href="#cite_note-:0-96"><span class="cite-bracket">&#91;</span>96<span class="cite-bracket">&#93;</span></a></sup> </p><p>Fake endorsements have misused the identities of celebrities like <a href="/wiki/Taylor_Swift" title="Taylor Swift">Taylor Swift</a>,<sup id="cite_ref-:1_97-0" class="reference"><a href="#cite_note-:1-97"><span class="cite-bracket">&#91;</span>97<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-:0_96-1" class="reference"><a href="#cite_note-:0-96"><span class="cite-bracket">&#91;</span>96<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Tom_Hanks" title="Tom Hanks">Tom Hanks</a>,<sup id="cite_ref-:2_98-0" class="reference"><a href="#cite_note-:2-98"><span class="cite-bracket">&#91;</span>98<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/Oprah_Winfrey" title="Oprah Winfrey">Oprah Winfrey</a>,<sup id="cite_ref-:3_99-0" class="reference"><a href="#cite_note-:3-99"><span class="cite-bracket">&#91;</span>99<span class="cite-bracket">&#93;</span></a></sup> and <a href="/wiki/Elon_Musk" title="Elon Musk">Elon Musk</a>;<sup id="cite_ref-:4_100-0" class="reference"><a href="#cite_note-:4-100"><span class="cite-bracket">&#91;</span>100<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/News_anchors" class="mw-redirect" title="News anchors">news anchors</a><sup id="cite_ref-101" class="reference"><a href="#cite_note-101"><span class="cite-bracket">&#91;</span>101<span class="cite-bracket">&#93;</span></a></sup> like <a href="/wiki/Gayle_King" title="Gayle 
King">Gayle King</a><sup id="cite_ref-:2_98-1" class="reference"><a href="#cite_note-:2-98"><span class="cite-bracket">&#91;</span>98<span class="cite-bracket">&#93;</span></a></sup> and <a href="/wiki/Sally_Bundock" title="Sally Bundock">Sally Bundock</a>;<sup id="cite_ref-102" class="reference"><a href="#cite_note-102"><span class="cite-bracket">&#91;</span>102<span class="cite-bracket">&#93;</span></a></sup> and politicians like <a href="/wiki/Lee_Hsien_Loong" title="Lee Hsien Loong">Lee Hsien Loong</a><sup id="cite_ref-:5_103-0" class="reference"><a href="#cite_note-:5-103"><span class="cite-bracket">&#91;</span>103<span class="cite-bracket">&#93;</span></a></sup> and <a href="/wiki/Jim_Chalmers" title="Jim Chalmers">Jim Chalmers</a>.<sup id="cite_ref-104" class="reference"><a href="#cite_note-104"><span class="cite-bracket">&#91;</span>104<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-105" class="reference"><a href="#cite_note-105"><span class="cite-bracket">&#91;</span>105<span class="cite-bracket">&#93;</span></a></sup> Videos of them have appeared in <a href="/wiki/Online_advertisements" class="mw-redirect" title="Online advertisements">online advertisements</a> on <a href="/wiki/YouTube" title="YouTube">YouTube</a>, <a href="/wiki/Facebook" title="Facebook">Facebook</a>, and <a href="/wiki/TikTok" title="TikTok">TikTok</a>, who have policies against <a href="/wiki/Synthetic_media" title="Synthetic media">synthetic and manipulated media</a>.<sup id="cite_ref-:6_106-0" class="reference"><a href="#cite_note-:6-106"><span class="cite-bracket">&#91;</span>106<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-:1_97-1" class="reference"><a href="#cite_note-:1-97"><span class="cite-bracket">&#91;</span>97<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-:7_107-0" class="reference"><a href="#cite_note-:7-107"><span class="cite-bracket">&#91;</span>107<span class="cite-bracket">&#93;</span></a></sup> Ads running 
these videos are seen by millions of people. A single <a href="/wiki/Medicare_fraud" title="Medicare fraud">Medicare fraud</a> campaign had been viewed more than 195 million times across thousands of videos.<sup id="cite_ref-:6_106-1" class="reference"><a href="#cite_note-:6-106"><span class="cite-bracket">&#91;</span>106<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-108" class="reference"><a href="#cite_note-108"><span class="cite-bracket">&#91;</span>108<span class="cite-bracket">&#93;</span></a></sup> Deepfakes have been used for: a fake giveaway of <a href="/wiki/Le_Creuset" title="Le Creuset">Le Creuset</a> cookware for a "shipping fee" without receiving the products, except for hidden monthly charges;<sup id="cite_ref-:1_97-2" class="reference"><a href="#cite_note-:1-97"><span class="cite-bracket">&#91;</span>97<span class="cite-bracket">&#93;</span></a></sup> weight-loss gummies that charge significantly more than what was said;<sup id="cite_ref-:3_99-1" class="reference"><a href="#cite_note-:3-99"><span class="cite-bracket">&#91;</span>99<span class="cite-bracket">&#93;</span></a></sup> a fake iPhone giveaway;<sup id="cite_ref-:1_97-3" class="reference"><a href="#cite_note-:1-97"><span class="cite-bracket">&#91;</span>97<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-:7_107-1" class="reference"><a href="#cite_note-:7-107"><span class="cite-bracket">&#91;</span>107<span class="cite-bracket">&#93;</span></a></sup> and fraudulent <a href="/wiki/Get-rich-quick_scheme" title="Get-rich-quick scheme">get-rich-quick</a>,<sup id="cite_ref-:4_100-1" class="reference"><a href="#cite_note-:4-100"><span class="cite-bracket">&#91;</span>100<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-109" class="reference"><a href="#cite_note-109"><span class="cite-bracket">&#91;</span>109<span class="cite-bracket">&#93;</span></a></sup> investment,<sup id="cite_ref-110" class="reference"><a href="#cite_note-110"><span 
class="cite-bracket">&#91;</span>110<span class="cite-bracket">&#93;</span></a></sup> and <a href="/wiki/Cryptocurrency" title="Cryptocurrency">cryptocurrency</a> schemes.<sup id="cite_ref-:5_103-1" class="reference"><a href="#cite_note-:5-103"><span class="cite-bracket">&#91;</span>103<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-111" class="reference"><a href="#cite_note-111"><span class="cite-bracket">&#91;</span>111<span class="cite-bracket">&#93;</span></a></sup> </p><p>Many ads pair AI <a href="/wiki/Voice_cloning" class="mw-redirect" title="Voice cloning">voice cloning</a> with "decontextualized video of the celebrity" to mimic authenticity. Others use a whole clip from a celebrity before moving to a different actor or voice.<sup id="cite_ref-:6_106-2" class="reference"><a href="#cite_note-:6-106"><span class="cite-bracket">&#91;</span>106<span class="cite-bracket">&#93;</span></a></sup> Some scams may involve real-time deepfakes.<sup id="cite_ref-:7_107-2" class="reference"><a href="#cite_note-:7-107"><span class="cite-bracket">&#91;</span>107<span class="cite-bracket">&#93;</span></a></sup> </p><p>Celebrities have been warning people of these fake endorsements, and to be more vigilant against them.<sup id="cite_ref-:0_96-2" class="reference"><a href="#cite_note-:0-96"><span class="cite-bracket">&#91;</span>96<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-:1_97-4" class="reference"><a href="#cite_note-:1-97"><span class="cite-bracket">&#91;</span>97<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-:3_99-2" class="reference"><a href="#cite_note-:3-99"><span class="cite-bracket">&#91;</span>99<span class="cite-bracket">&#93;</span></a></sup> Celebrities are unlikely to file lawsuits against every person operating deepfake scams, as "finding and suing anonymous social media users is resource intensive," though <a href="/wiki/Cease_and_desist" title="Cease and desist">cease and desist</a> letters to 
social media companies work in getting videos and ads taken down.<sup id="cite_ref-112" class="reference"><a href="#cite_note-112"><span class="cite-bracket">&#91;</span>112<span class="cite-bracket">&#93;</span></a></sup> </p><p><a href="/wiki/Audio_deepfake" title="Audio deepfake">Audio deepfakes</a> have been used as part of <a href="/wiki/Social_engineering_(security)" title="Social engineering (security)">social engineering</a> scams, fooling people into thinking they are receiving instructions from a trusted individual.<sup id="cite_ref-Statt-2019_113-0" class="reference"><a href="#cite_note-Statt-2019-113"><span class="cite-bracket">&#91;</span>113<span class="cite-bracket">&#93;</span></a></sup> In 2019, a U.K.-based energy firm's CEO was scammed over the phone when he was ordered to transfer €220,000 into a Hungarian bank account by an individual who reportedly used audio deepfake technology to impersonate the voice of the firm's parent company's chief executive.<sup id="cite_ref-Damiani-2019_114-0" class="reference"><a href="#cite_note-Damiani-2019-114"><span class="cite-bracket">&#91;</span>114<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-115" class="reference"><a href="#cite_note-115"><span class="cite-bracket">&#91;</span>115<span class="cite-bracket">&#93;</span></a></sup> </p><p>As of 2023, the combination of advances in deepfake technology, which could clone an individual's voice from a recording of a few seconds to a minute, and new <a href="/wiki/Generative_artificial_intelligence" title="Generative artificial intelligence">text generation tools</a>, enabled automated impersonation scams, targeting victims using a convincing digital clone of a friend or relative.<sup id="cite_ref-116" class="reference"><a href="#cite_note-116"><span class="cite-bracket">&#91;</span>116<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Identity_masking">Identity masking</h3><span
class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=15" title="Edit section: Identity masking"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Audio deepfakes can be used to mask a user's real identity. In <a href="/wiki/Online_game" title="Online game">online gaming</a>, for example, a <a href="/wiki/Gamer" title="Gamer">player</a> may want to choose a voice that sounds like their <a href="/wiki/Player_character" title="Player character">in-game character</a> when speaking to other players. Those who are subject to <a href="/wiki/Cyberbullying#in_gaming" title="Cyberbullying">harassment</a>, such as women, children, and transgender people, can use these "voice skins" to hide their gender or age.<sup id="cite_ref-Deepfakes_explained_117-0" class="reference"><a href="#cite_note-Deepfakes_explained-117"><span class="cite-bracket">&#91;</span>117<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Memes">Memes</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=16" title="Edit section: Memes"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In 2020, an <a href="/wiki/Internet_meme" title="Internet meme">internet meme</a> emerged utilizing deepfakes to generate videos of people singing the chorus of "Baka Mitai"<span style="font-weight: normal"> (<span title="Japanese-language text"><span lang="ja">ばかみたい</span></span>)</span>, a song from the game <i><a href="/wiki/Yakuza_0" title="Yakuza 0">Yakuza 0</a></i> in the video game series <i><a href="/wiki/Like_a_Dragon" class="mw-redirect" title="Like a Dragon">Like a Dragon</a></i>. 
In the series, the melancholic song is sung by the player in a <a href="/wiki/Karaoke" title="Karaoke">karaoke</a> <a href="/wiki/Minigame" title="Minigame">minigame</a>. Most iterations of this meme use a 2017 video uploaded by user Dobbsyrules, who <a href="/wiki/Lip_sync" title="Lip sync">lip syncs</a> the song, as a template.<sup id="cite_ref-118" class="reference"><a href="#cite_note-118"><span class="cite-bracket">&#91;</span>118<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-119" class="reference"><a href="#cite_note-119"><span class="cite-bracket">&#91;</span>119<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Politics">Politics</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=17" title="Edit section: Politics"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfakes have been used to misrepresent well-known politicians in videos. 
</p> <ul><li>In February 2018, in separate videos, the face of the Argentine President <a href="/wiki/Mauricio_Macri" title="Mauricio Macri">Mauricio Macri</a> had been replaced by the face of <a href="/wiki/Adolf_Hitler" title="Adolf Hitler">Adolf Hitler</a>, and <a href="/wiki/Angela_Merkel" title="Angela Merkel">Angela Merkel</a>'s face has been replaced with <a href="/wiki/Donald_Trump" title="Donald Trump">Donald Trump</a>'s.<sup id="cite_ref-Bezmalinovic-2018_120-0" class="reference"><a href="#cite_note-Bezmalinovic-2018-120"><span class="cite-bracket">&#91;</span>120<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-121" class="reference"><a href="#cite_note-121"><span class="cite-bracket">&#91;</span>121<span class="cite-bracket">&#93;</span></a></sup></li> <li>In April 2018, <a href="/wiki/Jordan_Peele" title="Jordan Peele">Jordan Peele</a> collaborated with <a href="/wiki/BuzzFeed" title="BuzzFeed">Buzzfeed</a> to create a deepfake of <a href="/wiki/Barack_Obama" title="Barack Obama">Barack Obama</a> with Peele's voice; it served as a <a href="/wiki/Public_service_announcement" title="Public service announcement">public service announcement</a> to increase awareness of deepfakes.<sup id="cite_ref-122" class="reference"><a href="#cite_note-122"><span class="cite-bracket">&#91;</span>122<span class="cite-bracket">&#93;</span></a></sup></li> <li>In January 2019, <a href="/wiki/Fox_Broadcasting_Company" title="Fox Broadcasting Company">Fox</a> affiliate <a href="/wiki/KCPQ" title="KCPQ">KCPQ</a> aired a deepfake of Trump during <a href="/wiki/January_2019_Oval_Office_address" class="mw-redirect" title="January 2019 Oval Office address">his Oval Office address</a>, mocking his appearance and skin colour. 
The employee found responsible for the video was subsequently fired.<sup id="cite_ref-123" class="reference"><a href="#cite_note-123"><span class="cite-bracket">&#91;</span>123<span class="cite-bracket">&#93;</span></a></sup></li> <li>In June 2019, the United States <a href="/wiki/House_Intelligence_Committee" class="mw-redirect" title="House Intelligence Committee">House Intelligence Committee</a> held hearings on the potential malicious use of deepfakes to sway elections.<sup id="cite_ref-124" class="reference"><a href="#cite_note-124"><span class="cite-bracket">&#91;</span>124<span class="cite-bracket">&#93;</span></a></sup></li> <li>In April 2020, the Belgian branch of <a href="/wiki/Extinction_Rebellion" title="Extinction Rebellion">Extinction Rebellion</a> published a deepfake video of Belgian Prime Minister <a href="/wiki/Sophie_Wilm%C3%A8s" title="Sophie Wilmès">Sophie Wilmès</a> on Facebook.<sup id="cite_ref-125" class="reference"><a href="#cite_note-125"><span class="cite-bracket">&#91;</span>125<span class="cite-bracket">&#93;</span></a></sup> The video promoted a possible link between <a href="/wiki/Deforestation" title="Deforestation">deforestation</a> and <a href="/wiki/COVID-19" title="COVID-19">COVID-19</a>. It had more than 100,000 views within 24 hours and received many comments. 
On the Facebook page where the video appeared, many users interpreted the deepfake video as genuine.<sup id="cite_ref-126" class="reference"><a href="#cite_note-126"><span class="cite-bracket">&#91;</span>126<span class="cite-bracket">&#93;</span></a></sup></li> <li>During the <a href="/wiki/2020_United_States_presidential_election" title="2020 United States presidential election">2020 US presidential campaign</a>, many deep fakes surfaced purporting to show <a href="/wiki/Joe_Biden" title="Joe Biden">Joe Biden</a> in cognitive decline—falling asleep during an interview, getting lost, and misspeaking—all bolstering rumors of his decline.<sup id="cite_ref-Carnahan_2020_127-0" class="reference"><a href="#cite_note-Carnahan_2020-127"><span class="cite-bracket">&#91;</span>127<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Parker_2020_128-0" class="reference"><a href="#cite_note-Parker_2020-128"><span class="cite-bracket">&#91;</span>128<span class="cite-bracket">&#93;</span></a></sup></li> <li>During the <a href="/wiki/2020_Delhi_Legislative_Assembly_election" title="2020 Delhi Legislative Assembly election">2020 Delhi Legislative Assembly election</a> campaign, the Delhi Bharatiya Janata Party used similar technology to distribute a version of an English-language campaign advertisement by its leader, <a href="/wiki/Manoj_Tiwari_(Delhi_politician)" title="Manoj Tiwari (Delhi politician)">Manoj Tiwari</a>, translated into <a href="/wiki/Haryanvi_language" title="Haryanvi language">Haryanvi</a> to target <a href="/wiki/Haryana" title="Haryana">Haryana</a> voters. A voiceover was provided by an actor, and AI trained using video of Tiwari speeches was used to lip-sync the video to the new voiceover.
A party staff member described it as a "positive" use of deepfake technology, which allowed them to "convincingly approach the target audience even if the candidate didn't speak the language of the voter."<sup id="cite_ref-129" class="reference"><a href="#cite_note-129"><span class="cite-bracket">&#91;</span>129<span class="cite-bracket">&#93;</span></a></sup></li> <li>In 2020, <a href="/wiki/Bruno_Sartori" title="Bruno Sartori">Bruno Sartori</a> produced deepfakes parodying politicians like <a href="/wiki/Jair_Bolsonaro" title="Jair Bolsonaro">Jair Bolsonaro</a> and <a href="/wiki/Donald_Trump" title="Donald Trump">Donald Trump</a>.<sup id="cite_ref-130" class="reference"><a href="#cite_note-130"><span class="cite-bracket">&#91;</span>130<span class="cite-bracket">&#93;</span></a></sup></li> <li>In April 2021, politicians in a number of European countries were approached by pranksters <a href="/wiki/Vovan_and_Lexus" title="Vovan and Lexus">Vovan and Lexus</a>, who are accused by critics of working for the <a href="/wiki/Russia" title="Russia">Russian</a> state. 
They impersonated <a href="/wiki/Leonid_Volkov_(politician)" title="Leonid Volkov (politician)">Leonid Volkov</a>, a Russian opposition politician and chief of staff of the Russian opposition leader <a href="/wiki/Alexei_Navalny" title="Alexei Navalny">Alexei Navalny</a>'s campaign, allegedly through deepfake technology.<sup id="cite_ref-131" class="reference"><a href="#cite_note-131"><span class="cite-bracket">&#91;</span>131<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-132" class="reference"><a href="#cite_note-132"><span class="cite-bracket">&#91;</span>132<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-133" class="reference"><a href="#cite_note-133"><span class="cite-bracket">&#91;</span>133<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-134" class="reference"><a href="#cite_note-134"><span class="cite-bracket">&#91;</span>134<span class="cite-bracket">&#93;</span></a></sup> However, the pair told <i><a href="/wiki/The_Verge" title="The Verge">The Verge</a></i> that they did not use deepfakes, and just used a <a href="/wiki/Look-alike" title="Look-alike">look-alike</a>.<sup id="cite_ref-135" class="reference"><a href="#cite_note-135"><span class="cite-bracket">&#91;</span>135<span class="cite-bracket">&#93;</span></a></sup></li> <li>In May 2023, a deepfake video of Vice President <a href="/wiki/Kamala_Harris" title="Kamala Harris">Kamala Harris</a> supposedly slurring her words and speaking nonsensically about today, tomorrow and yesterday went viral on social media.<sup id="cite_ref-136" class="reference"><a href="#cite_note-136"><span class="cite-bracket">&#91;</span>136<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-137" class="reference"><a href="#cite_note-137"><span class="cite-bracket">&#91;</span>137<span class="cite-bracket">&#93;</span></a></sup></li> <li>In June 2023, in the United States, <a href="/wiki/Ron_DeSantis" title="Ron DeSantis">Ron DeSantis</a>'s presidential 
campaign used a deepfake to misrepresent Donald Trump.<sup id="cite_ref-138" class="reference"><a href="#cite_note-138"><span class="cite-bracket">&#91;</span>138<span class="cite-bracket">&#93;</span></a></sup></li></ul> <ul><li>In March 2024, during India's state assembly elections, deepfake technology was widely employed by political candidates to reach out to voters. Many politicians used AI-generated deepfakes created by an Indian startup The Indian Deepfaker, founded by Divyendra Singh Jadoun <sup id="cite_ref-139" class="reference"><a href="#cite_note-139"><span class="cite-bracket">&#91;</span>139<span class="cite-bracket">&#93;</span></a></sup> to translate their speeches into multiple regional languages, allowing them to engage with diverse linguistic communities across the country. This surge in the use of deepfakes for political campaigns marked a significant shift in electioneering tactics in India.<sup id="cite_ref-140" class="reference"><a href="#cite_note-140"><span class="cite-bracket">&#91;</span>140<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-141" class="reference"><a href="#cite_note-141"><span class="cite-bracket">&#91;</span>141<span class="cite-bracket">&#93;</span></a></sup></li></ul> <div class="mw-heading mw-heading3"><h3 id="Pornography">Pornography</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=18" title="Edit section: Pornography"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1236090951">.mw-parser-output .hatnote{font-style:italic}.mw-parser-output div.hatnote{padding-left:1.6em;margin-bottom:0.5em}.mw-parser-output .hatnote i{font-style:normal}.mw-parser-output .hatnote+link+.hatnote{margin-top:-0.5em}@media print{body.ns-0 .mw-parser-output .hatnote{display:none!important}}</style><div role="note" class="hatnote
navigation-not-searchable">Main article: <a href="/wiki/Deepfake_pornography" title="Deepfake pornography">Deepfake pornography</a></div> <p>In 2017, Deepfake pornography prominently surfaced on the Internet, particularly on <a href="/wiki/Reddit" title="Reddit">Reddit</a>.<sup id="cite_ref-Roettgers-2018_142-0" class="reference"><a href="#cite_note-Roettgers-2018-142"><span class="cite-bracket">&#91;</span>142<span class="cite-bracket">&#93;</span></a></sup> As of 2019, many deepfakes on the internet feature pornography of female celebrities whose likeness is typically used without their consent.<sup id="cite_ref-Dickson-2019_143-0" class="reference"><a href="#cite_note-Dickson-2019-143"><span class="cite-bracket">&#91;</span>143<span class="cite-bracket">&#93;</span></a></sup> A report published in October 2019 by Dutch cybersecurity startup Deeptrace estimated that 96% of all deepfakes online were pornographic.<sup id="cite_ref-Ajder-2019_144-0" class="reference"><a href="#cite_note-Ajder-2019-144"><span class="cite-bracket">&#91;</span>144<span class="cite-bracket">&#93;</span></a></sup> As of 2018, a <a href="/wiki/Daisy_Ridley" title="Daisy Ridley">Daisy Ridley</a> deepfake first captured attention,<sup id="cite_ref-Roettgers-2018_142-1" class="reference"><a href="#cite_note-Roettgers-2018-142"><span class="cite-bracket">&#91;</span>142<span class="cite-bracket">&#93;</span></a></sup> among others.<sup id="cite_ref-145" class="reference"><a href="#cite_note-145"><span class="cite-bracket">&#91;</span>145<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-146" class="reference"><a href="#cite_note-146"><span class="cite-bracket">&#91;</span>146<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Cole-2018d_147-0" class="reference"><a href="#cite_note-Cole-2018d-147"><span class="cite-bracket">&#91;</span>147<span class="cite-bracket">&#93;</span></a></sup> As of October 2019, most of the deepfake subjects on the internet were 
British and American actors.<sup id="cite_ref-Dickson-2019_143-1" class="reference"><a href="#cite_note-Dickson-2019-143"><span class="cite-bracket">&#91;</span>143<span class="cite-bracket">&#93;</span></a></sup> However, around a quarter of the subjects are South Korean, the majority of which are K-pop stars.<sup id="cite_ref-Dickson-2019_143-2" class="reference"><a href="#cite_note-Dickson-2019-143"><span class="cite-bracket">&#91;</span>143<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-148" class="reference"><a href="#cite_note-148"><span class="cite-bracket">&#91;</span>148<span class="cite-bracket">&#93;</span></a></sup> </p><p><span class="anchor" id="DeepNude"></span>In June 2019, a downloadable <a href="/wiki/Windows" class="mw-redirect" title="Windows">Windows</a> and <a href="/wiki/Linux" title="Linux">Linux</a> application called DeepNude was released that used neural networks, specifically <a href="/wiki/Generative_adversarial_networks" class="mw-redirect" title="Generative adversarial networks">generative adversarial networks</a>, to remove clothing from images of women. The app had both a paid and unpaid version, the paid version costing $50.<sup id="cite_ref-Cole-2019a_149-0" class="reference"><a href="#cite_note-Cole-2019a-149"><span class="cite-bracket">&#91;</span>149<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-150" class="reference"><a href="#cite_note-150"><span class="cite-bracket">&#91;</span>150<span class="cite-bracket">&#93;</span></a></sup> On 27 June the creators removed the application and refunded consumers.<sup id="cite_ref-151" class="reference"><a href="#cite_note-151"><span class="cite-bracket">&#91;</span>151<span class="cite-bracket">&#93;</span></a></sup> </p><p>Female celebrities are often a main target when it comes to deepfake pornography. 
In 2023, deepfake porn videos appeared online of <a href="/wiki/Emma_Watson" title="Emma Watson">Emma Watson</a> and <a href="/wiki/Scarlett_Johansson" title="Scarlett Johansson">Scarlett Johansson</a> in a face swapping app.<sup id="cite_ref-152" class="reference"><a href="#cite_note-152"><span class="cite-bracket">&#91;</span>152<span class="cite-bracket">&#93;</span></a></sup> In 2024, deepfake porn images circulated online of <a href="/wiki/Taylor_Swift" title="Taylor Swift">Taylor Swift</a>.<sup id="cite_ref-153" class="reference"><a href="#cite_note-153"><span class="cite-bracket">&#91;</span>153<span class="cite-bracket">&#93;</span></a></sup> </p><p>Academic studies have reported that women, LGBT people and people of colour (particularly activists, politicians and those questioning power) are at higher risk of being targets of promulgation of deepfake pornography.<sup id="cite_ref-154" class="reference"><a href="#cite_note-154"><span class="cite-bracket">&#91;</span>154<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Social_media">Social media</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=19" title="Edit section: Social media"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfakes have begun to see use in popular social media platforms, notably through Zao, a Chinese deepfake app that allows users to substitute their own faces onto those of characters in scenes from films and television shows such as <i><a href="/wiki/Romeo_%2B_Juliet" title="Romeo + Juliet">Romeo + Juliet</a></i> and <i><a href="/wiki/Game_of_Thrones" title="Game of Thrones">Game of Thrones</a></i>.<sup id="cite_ref-155" class="reference"><a href="#cite_note-155"><span class="cite-bracket">&#91;</span>155<span class="cite-bracket">&#93;</span></a></sup> The app originally faced scrutiny over its invasive 
user data and privacy policy, after which the company put out a statement claiming it would revise the policy.<sup id="cite_ref-Porter-2019_16-3" class="reference"><a href="#cite_note-Porter-2019-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup> In January 2020 Facebook announced that it was introducing new measures to counter this on its platforms.<sup id="cite_ref-156" class="reference"><a href="#cite_note-156"><span class="cite-bracket">&#91;</span>156<span class="cite-bracket">&#93;</span></a></sup> </p><p>The <a href="/wiki/Congressional_Research_Service" title="Congressional Research Service">Congressional Research Service</a> cited unspecified evidence as showing that foreign <a href="/wiki/Intelligence_officer" title="Intelligence officer">intelligence operatives</a> used deepfakes to create social media accounts with the purposes of <a href="/wiki/Recruitment_of_spies" title="Recruitment of spies">recruiting</a> individuals with access to <a href="/wiki/Classified_information" title="Classified information">classified information</a>.<sup id="cite_ref-CRS1_82-1" class="reference"><a href="#cite_note-CRS1-82"><span class="cite-bracket">&#91;</span>82<span class="cite-bracket">&#93;</span></a></sup> </p><p>In 2021, realistic deepfake videos of actor <a href="/wiki/Tom_Cruise" title="Tom Cruise">Tom Cruise</a> were released on <a href="/wiki/TikTok" title="TikTok">TikTok</a>, which went viral and garnered more than tens of millions of views. The deepfake videos featured an "artificial intelligence-generated doppelganger" of Cruise doing various activities such as teeing off at the golf course, showing off a coin trick, and biting into a lollipop. 
The creator of the clips, <a href="/wiki/Belgium" title="Belgium">Belgian</a> <a href="/wiki/Visual_effects" title="Visual effects">VFX</a> Artist Chris Umé,<sup id="cite_ref-157" class="reference"><a href="#cite_note-157"><span class="cite-bracket">&#91;</span>157<span class="cite-bracket">&#93;</span></a></sup> said he first got interested in deepfakes in 2018 and saw the "creative potential" of them.<sup id="cite_ref-158" class="reference"><a href="#cite_note-158"><span class="cite-bracket">&#91;</span>158<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-159" class="reference"><a href="#cite_note-159"><span class="cite-bracket">&#91;</span>159<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Sockpuppets">Sockpuppets</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=20" title="Edit section: Sockpuppets"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfake photographs can be used to create <a href="/wiki/Sockpuppet_(internet)" class="mw-redirect" title="Sockpuppet (internet)">sockpuppets</a>, non-existent people, who are active both online and in traditional media. A deepfake photograph appears to have been generated together with a legend for an apparently non-existent person named Oliver Taylor, whose identity was described as a university student in the United Kingdom. The Oliver Taylor persona submitted opinion pieces in several newspapers and was active in online media attacking a British legal academic and his wife, as "terrorist sympathizers." The academic had drawn international attention in 2018 when he commenced a lawsuit in Israel against NSO, a surveillance company, on behalf of people in Mexico who alleged they were victims of NSO's <a href="/wiki/Phone_hacking" title="Phone hacking">phone hacking</a> technology. 
<i>Reuters</i> could find only scant records for Oliver Taylor and "his" university had no records for him. Many experts agreed that the profile photo is a deepfake. Several newspapers have not retracted articles attributed to him or removed them from their websites. It is feared that such techniques are a new battleground in <a href="/wiki/Disinformation" title="Disinformation">disinformation</a>.<sup id="cite_ref-160" class="reference"><a href="#cite_note-160"><span class="cite-bracket">&#91;</span>160<span class="cite-bracket">&#93;</span></a></sup> </p><p>Collections of deepfake photographs of non-existent people on <a href="/wiki/Social_networks" class="mw-redirect" title="Social networks">social networks</a> have also been deployed as part of Israeli <a href="/wiki/Partisan_(politics)" title="Partisan (politics)">partisan</a> propaganda. The <a href="/wiki/Facebook" title="Facebook">Facebook</a> page "Zionist Spring" featured photos of non-existent persons along with their "testimonies" purporting to explain why they have abandoned their left-leaning politics to embrace <a href="/wiki/Right-wing_politics" title="Right-wing politics">right-wing politics</a>, and the page also contained large numbers of posts from <a href="/wiki/Prime_Minister_of_Israel" title="Prime Minister of Israel">Prime Minister of Israel</a> <a href="/wiki/Benjamin_Netanyahu" title="Benjamin Netanyahu">Benjamin Netanyahu</a> and his son and from other Israeli right wing sources. The photographs appear to have been generated by "<a href="/wiki/Human_image_synthesis" title="Human image synthesis">human image synthesis</a>" technology, computer software that takes data from photos of real people to produce a realistic composite image of a non-existent person. In much of the "testimonies," the reason given for embracing the political right was the shock of learning of alleged <a href="/wiki/Incitement" title="Incitement">incitement</a> to violence against the prime minister. 
Right wing Israeli television broadcasters then broadcast the "testimonies" of these non-existent people based on the fact that they were being "shared" online. The broadcasters aired these "testimonies" despite being unable to find such people, explaining "Why does the origin matter?" Other Facebook fake profiles—profiles of fictitious individuals—contained material that allegedly contained such incitement against the right wing prime minister, in response to which the prime minister complained that there was a plot to murder him.<sup id="cite_ref-161" class="reference"><a href="#cite_note-161"><span class="cite-bracket">&#91;</span>161<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-162" class="reference"><a href="#cite_note-162"><span class="cite-bracket">&#91;</span>162<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Concerns_and_countermeasures">Concerns and countermeasures</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=21" title="Edit section: Concerns and countermeasures"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Though fake photos have long been plentiful, faking motion pictures has been more difficult, and the presence of deepfakes increases the difficulty of classifying videos as genuine or not.<sup id="cite_ref-Bezmalinovic-2018_120-1" class="reference"><a href="#cite_note-Bezmalinovic-2018-120"><span class="cite-bracket">&#91;</span>120<span class="cite-bracket">&#93;</span></a></sup> AI researcher Alex Champandard has said people should know how fast things can be corrupted with deepfake technology, and that the problem is not a technical one, but rather one to be solved by trust in information and journalism.<sup id="cite_ref-Bezmalinovic-2018_120-2" class="reference"><a href="#cite_note-Bezmalinovic-2018-120"><span 
class="cite-bracket">&#91;</span>120<span class="cite-bracket">&#93;</span></a></sup> Computer science associate professor <a href="/wiki/Hao_Li" title="Hao Li">Hao Li</a> of the <a href="/wiki/University_of_Southern_California" title="University of Southern California">University of Southern California</a> states that deepfakes created for malicious use, such as <a href="/wiki/Fake_news" title="Fake news">fake news</a>, will be even more harmful if nothing is done to spread awareness of deepfake technology.<sup id="cite_ref-Perfect_163-0" class="reference"><a href="#cite_note-Perfect-163"><span class="cite-bracket">&#91;</span>163<span class="cite-bracket">&#93;</span></a></sup> Li predicted that genuine videos and deepfakes would become indistinguishable in as soon as half a year, as of October 2019, due to rapid advancement in <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> and computer graphics.<sup id="cite_ref-Perfect_163-1" class="reference"><a href="#cite_note-Perfect-163"><span class="cite-bracket">&#91;</span>163<span class="cite-bracket">&#93;</span></a></sup> Former <a href="/wiki/Google" title="Google">Google</a> fraud czar <a href="/wiki/Shuman_Ghosemajumder" title="Shuman Ghosemajumder">Shuman Ghosemajumder</a> has called deepfakes an area of "societal concern" and said that they will inevitably evolve to a point at which they can be generated automatically, and an individual could use that technology to produce millions of deepfake videos.<sup id="cite_ref-164" class="reference"><a href="#cite_note-164"><span class="cite-bracket">&#91;</span>164<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Credibility_of_information">Credibility of information</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=22" title="Edit section: Credibility of 
information"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>A primary pitfall is that humanity could fall into an age in which it can no longer be determined whether a medium's content corresponds to the truth.<sup id="cite_ref-Bezmalinovic-2018_120-3" class="reference"><a href="#cite_note-Bezmalinovic-2018-120"><span class="cite-bracket">&#91;</span>120<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Vaccari_165-0" class="reference"><a href="#cite_note-Vaccari-165"><span class="cite-bracket">&#91;</span>165<span class="cite-bracket">&#93;</span></a></sup> Deepfakes are one of a number of tools for <a href="/wiki/Disinformation_attack" title="Disinformation attack">disinformation attack</a>, creating doubt, and undermining trust. They have the potential to interfere with democratic functions in societies, such as identifying collective agendas, debating issues, informing decisions, and solving problems through the exercise of political will.<sup id="cite_ref-Pawelec_166-0" class="reference"><a href="#cite_note-Pawelec-166"><span class="cite-bracket">&#91;</span>166<span class="cite-bracket">&#93;</span></a></sup> People may also start to dismiss real events as fake.<sup id="cite_ref-Deepfakes_explained_117-1" class="reference"><a href="#cite_note-Deepfakes_explained-117"><span class="cite-bracket">&#91;</span>117<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Defamation">Defamation</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=23" title="Edit section: Defamation"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Deepfakes possess the ability to damage individual entities tremendously.<sup id="cite_ref-Bateman-2020_167-0" class="reference"><a href="#cite_note-Bateman-2020-167"><span class="cite-bracket">&#91;</span>167<span
class="cite-bracket">&#93;</span></a></sup> This is because deepfakes are often targeted at one individual, and/or their relations to others in hopes to create a narrative powerful enough to influence public opinion or beliefs. This can be done through deepfake voice phishing, which manipulates audio to create fake phone calls or conversations.<sup id="cite_ref-Bateman-2020_167-1" class="reference"><a href="#cite_note-Bateman-2020-167"><span class="cite-bracket">&#91;</span>167<span class="cite-bracket">&#93;</span></a></sup> Another method of deepfake use is fabricated private remarks, which manipulate media to convey individuals voicing damaging comments.<sup id="cite_ref-Bateman-2020_167-2" class="reference"><a href="#cite_note-Bateman-2020-167"><span class="cite-bracket">&#91;</span>167<span class="cite-bracket">&#93;</span></a></sup> The quality of a negative video or audio does not need to be that high. As long as someone's likeness and actions are recognizable, a deepfake can hurt their reputation.<sup id="cite_ref-Deepfakes_explained_117-2" class="reference"><a href="#cite_note-Deepfakes_explained-117"><span class="cite-bracket">&#91;</span>117<span class="cite-bracket">&#93;</span></a></sup> </p><p>In September 2020 Microsoft made public that they are developing a Deepfake detection software tool.<sup id="cite_ref-168" class="reference"><a href="#cite_note-168"><span class="cite-bracket">&#91;</span>168<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Detection">Detection</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=24" title="Edit section: Detection"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="mw-heading mw-heading4"><h4 id="Audio">Audio</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a 
href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=25" title="Edit section: Audio"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Detecting fake audio is a highly complex task that requires careful attention to the audio signal in order to achieve good performance. Using deep learning, preprocessing of feature design and masking augmentation have been proven effective in improving performance.<sup id="cite_ref-169" class="reference"><a href="#cite_note-169"><span class="cite-bracket">&#91;</span>169<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Video">Video</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=26" title="Edit section: Video"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Most of the academic research surrounding deepfakes focuses on the detection of deepfake videos.<sup id="cite_ref-Manke-2019_170-0" class="reference"><a href="#cite_note-Manke-2019-170"><span class="cite-bracket">&#91;</span>170<span class="cite-bracket">&#93;</span></a></sup> One approach to deepfake detection is to use algorithms to recognize patterns and pick up subtle inconsistencies that arise in deepfake videos.<sup id="cite_ref-Manke-2019_170-1" class="reference"><a href="#cite_note-Manke-2019-170"><span class="cite-bracket">&#91;</span>170<span class="cite-bracket">&#93;</span></a></sup> For example, researchers have developed automatic systems that examine videos for errors such as irregular blinking patterns of lighting.<sup id="cite_ref-Hany_171-0" class="reference"><a href="#cite_note-Hany-171"><span class="cite-bracket">&#91;</span>171<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Harwell-2019_14-2" class="reference"><a href="#cite_note-Harwell-2019-14"><span class="cite-bracket">&#91;</span>14<span 
class="cite-bracket">&#93;</span></a></sup> This approach has been criticized because deepfake detection is characterized by a "<a href="/wiki/Zero-sum_game" title="Zero-sum game">moving goal post</a>" where the production of deepfakes continues to change and improve as algorithms to detect deepfakes improve.<sup id="cite_ref-Manke-2019_170-2" class="reference"><a href="#cite_note-Manke-2019-170"><span class="cite-bracket">&#91;</span>170<span class="cite-bracket">&#93;</span></a></sup> In order to assess the most effective algorithms for detecting deepfakes, a coalition of leading technology companies hosted the Deepfake Detection Challenge to accelerate the technology for identifying manipulated content.<sup id="cite_ref-DDCD-2020_172-0" class="reference"><a href="#cite_note-DDCD-2020-172"><span class="cite-bracket">&#91;</span>172<span class="cite-bracket">&#93;</span></a></sup> The winning model of the Deepfake Detection Challenge was 65% accurate on the holdout set of 4,000 videos.<sup id="cite_ref-173" class="reference"><a href="#cite_note-173"><span class="cite-bracket">&#91;</span>173<span class="cite-bracket">&#93;</span></a></sup> A team at Massachusetts Institute of Technology published a paper in December 2021 demonstrating that ordinary humans are 69-72% accurate at identifying a random sample of 50 of these videos.<sup id="cite_ref-174" class="reference"><a href="#cite_note-174"><span class="cite-bracket">&#91;</span>174<span class="cite-bracket">&#93;</span></a></sup> </p><p>A team at the University of Buffalo published a paper in October 2020 outlining their technique of using reflections of light in the eyes of those depicted to spot deepfakes with a high rate of success, even without the use of an AI detection tool, at least for the time being.<sup id="cite_ref-175" class="reference"><a href="#cite_note-175"><span class="cite-bracket">&#91;</span>175<span class="cite-bracket">&#93;</span></a></sup> </p><p>In the case of well-documented individuals 
such as political leaders, algorithms have been developed to distinguish identity-based features such as patterns of facial, gestural, and vocal mannerisms and detect deep-fake impersonators.<sup id="cite_ref-Boháček_176-0" class="reference"><a href="#cite_note-Boháček-176"><span class="cite-bracket">&#91;</span>176<span class="cite-bracket">&#93;</span></a></sup> </p><p>Another team led by Wael AbdAlmageed with Visual Intelligence and Multimedia Analytics Laboratory (VIMAL) of the <a href="/wiki/Information_Sciences_Institute" title="Information Sciences Institute">Information Sciences Institute</a> at the <a href="/wiki/University_of_Southern_California" title="University of Southern California">University Of Southern California</a> developed two generations <sup id="cite_ref-Scholar-search-2022a_177-0" class="reference"><a href="#cite_note-Scholar-search-2022a-177"><span class="cite-bracket">&#91;</span>177<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Scholar-search-2022b_178-0" class="reference"><a href="#cite_note-Scholar-search-2022b-178"><span class="cite-bracket">&#91;</span>178<span class="cite-bracket">&#93;</span></a></sup> of deepfake detectors based on <a href="/wiki/Convolutional_neural_network" title="Convolutional neural network">convolutional neural networks</a>. The first generation <sup id="cite_ref-Scholar-search-2022a_177-1" class="reference"><a href="#cite_note-Scholar-search-2022a-177"><span class="cite-bracket">&#91;</span>177<span class="cite-bracket">&#93;</span></a></sup> used <a href="/wiki/Recurrent_neural_network" title="Recurrent neural network">recurrent neural networks</a> to spot spatio-temporal inconsistencies to identify visual artifacts left by the deepfake generation process. The algorithm achieved 96% accuracy on FaceForensics++, the only large-scale deepfake benchmark available at that time. 
The second generation <sup id="cite_ref-Scholar-search-2022b_178-1" class="reference"><a href="#cite_note-Scholar-search-2022b-178"><span class="cite-bracket">&#91;</span>178<span class="cite-bracket">&#93;</span></a></sup> used end-to-end deep networks to differentiate between artifacts and high-level semantic facial information using two-branch networks. The first branch propagates colour information while the other branch suppresses facial content and amplifies low-level frequencies using <a href="/wiki/Blob_detection#The_Laplacian_of_Gaussian" title="Blob detection">Laplacian of Gaussian (LoG)</a>. Further, they included a new loss function that learns a compact representation of bona fide faces, while dispersing the representations (i.e. features) of deepfakes. VIMAL's approach showed state-of-the-art performance on FaceForensics++ and Celeb-DF benchmarks, and on <a href="#Volodymyr_Zelenskyy">March 16, 2022</a> (the same day of the release), was used to identify the deepfake of Volodymyr Zelensky out-of-the-box without any retraining or knowledge of the algorithm with which the deepfake was created. <sup class="noprint Inline-Template Template-Fact" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Citation_needed" title="Wikipedia:Citation needed"><span title="This claim needs references to reliable sources. 
(September 2023)">citation needed</span></a></i>&#93;</sup> </p><p>Other techniques suggest that <a href="/wiki/Blockchain" title="Blockchain">blockchain</a> could be used to verify the source of the media.<sup id="cite_ref-Wired-2019b_179-0" class="reference"><a href="#cite_note-Wired-2019b-179"><span class="cite-bracket">&#91;</span>179<span class="cite-bracket">&#93;</span></a></sup> For instance, a video might have to be verified through the ledger before it is shown on social media platforms.<sup id="cite_ref-Wired-2019b_179-1" class="reference"><a href="#cite_note-Wired-2019b-179"><span class="cite-bracket">&#91;</span>179<span class="cite-bracket">&#93;</span></a></sup> With this technology, only videos from trusted sources would be approved, decreasing the spread of possibly harmful deepfake media.<sup id="cite_ref-Wired-2019b_179-2" class="reference"><a href="#cite_note-Wired-2019b-179"><span class="cite-bracket">&#91;</span>179<span class="cite-bracket">&#93;</span></a></sup> </p><p>Digitally signing of all video and imagery by cameras and video cameras, including smartphone cameras, was suggested to fight deepfakes.<sup id="cite_ref-Leetaru-2021_180-0" class="reference"><a href="#cite_note-Leetaru-2021-180"><span class="cite-bracket">&#91;</span>180<span class="cite-bracket">&#93;</span></a></sup> That allows tracing every photograph or video back to its original owner that can be used to pursue dissidents.<sup id="cite_ref-Leetaru-2021_180-1" class="reference"><a href="#cite_note-Leetaru-2021-180"><span class="cite-bracket">&#91;</span>180<span class="cite-bracket">&#93;</span></a></sup> </p><p>One easy way to uncover deepfake video calls consists in asking the caller to turn sideways.<sup id="cite_ref-181" class="reference"><a href="#cite_note-181"><span class="cite-bracket">&#91;</span>181<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Prevention">Prevention</h3><span class="mw-editsection"><span 
class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=27" title="Edit section: Prevention"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Henry Ajder, who works for Deeptrace, a company that detects deepfakes, says there are several ways to protect against deepfakes in the workplace. Semantic passwords or secret questions can be used when holding important conversations. Voice authentication and other <a href="/wiki/Biometric_device" title="Biometric device">biometric security features</a> should be up to date. Employees should also be educated about deepfakes.<sup id="cite_ref-Deepfakes_explained_117-3" class="reference"><a href="#cite_note-Deepfakes_explained-117"><span class="cite-bracket">&#91;</span>117<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Controversies">Controversies</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=28" title="Edit section: Controversies"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In March 2024, a video clip was released by <a href="/wiki/Buckingham_Palace" title="Buckingham Palace">Buckingham Palace</a>, in which <a href="/wiki/Kate_Middleton" class="mw-redirect" title="Kate Middleton">Kate Middleton</a> announced that she had cancer and was undergoing chemotherapy. However, the clip fuelled rumours that the woman in that clip was an AI deepfake.<sup id="cite_ref-182" class="reference"><a href="#cite_note-182"><span class="cite-bracket">&#91;</span>182<span class="cite-bracket">&#93;</span></a></sup> UCLA's race director Johnathan Perkins doubted she had cancer, and further speculated that she could be in critical condition or dead.
<sup id="cite_ref-183" class="reference"><a href="#cite_note-183"><span class="cite-bracket">&#91;</span>183<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Example_events">Example events</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=29" title="Edit section: Example events"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:Trump%E2%80%99s_arrest_(2).jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/f/f9/Trump%E2%80%99s_arrest_%282%29.jpg/220px-Trump%E2%80%99s_arrest_%282%29.jpg" decoding="async" width="220" height="220" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/f/f9/Trump%E2%80%99s_arrest_%282%29.jpg/330px-Trump%E2%80%99s_arrest_%282%29.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/f/f9/Trump%E2%80%99s_arrest_%282%29.jpg/440px-Trump%E2%80%99s_arrest_%282%29.jpg 2x" data-file-width="1024" data-file-height="1024" /></a><figcaption>A fake <a href="/wiki/Midjourney" title="Midjourney">Midjourney</a>-created image of Donald Trump being arrested<sup id="cite_ref-AP_News_184-0" class="reference"><a href="#cite_note-AP_News-184"><span class="cite-bracket">&#91;</span>184<span class="cite-bracket">&#93;</span></a></sup></figcaption></figure> <figure class="mw-default-size" typeof="mw:File/Thumb"><span><video id="mwe_player_1" poster="//upload.wikimedia.org/wikipedia/commons/thumb/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/220px--Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.jpg" controls="" preload="none" data-mw-tmh="" class="mw-file-element" width="220" height="124" data-durationhint="34" 
data-mwtitle="Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm" data-mwprovider="wikimediacommons" resource="/wiki/File:Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm"><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.480p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="480p.vp9.webm" data-width="854" data-height="480" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.720p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="720p.vp9.webm" data-width="1280" data-height="720" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.1080p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="1080p.vp9.webm" data-width="1920" data-height="1080" /><source src="//upload.wikimedia.org/wikipedia/commons/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-width="1920" data-height="1080" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.240p.vp9.webm" type="video/webm; 
codecs=&quot;vp9, opus&quot;" data-transcodekey="240p.vp9.webm" data-width="426" data-height="240" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.360p.vp9.webm" type="video/webm; codecs=&quot;vp9, opus&quot;" data-transcodekey="360p.vp9.webm" data-width="640" data-height="360" /><source src="//upload.wikimedia.org/wikipedia/commons/transcoded/a/af/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm/Vladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm.360p.webm" type="video/webm; codecs=&quot;vp8, vorbis&quot;" data-transcodekey="360p.webm" data-width="640" data-height="360" /><track src="https://commons.wikimedia.org/w/api.php?action=timedtext&amp;title=File%3AVladimir_Putin_warning_Americans_on_election_interference_and_increasing_political_divide.webm&amp;lang=en&amp;trackformat=vtt&amp;origin=%2A" kind="subtitles" type="text/vtt" srclang="en" label="English ‪(en)‬" data-dir="ltr" /></video></span><figcaption>Deepfake video: Vladimir Putin warning Americans on election interference and increasing political divide</figcaption></figure> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:Pope_Francis_in_puffy_winter_jacket.jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/9/98/Pope_Francis_in_puffy_winter_jacket.jpg/170px-Pope_Francis_in_puffy_winter_jacket.jpg" decoding="async" width="170" height="208" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/9/98/Pope_Francis_in_puffy_winter_jacket.jpg/255px-Pope_Francis_in_puffy_winter_jacket.jpg 1.5x, 
//upload.wikimedia.org/wikipedia/commons/thumb/9/98/Pope_Francis_in_puffy_winter_jacket.jpg/340px-Pope_Francis_in_puffy_winter_jacket.jpg 2x" data-file-width="1080" data-file-height="1323" /></a><figcaption>The fake Midjourney-created image of Pope Francis wearing a puffer jacket</figcaption></figure> <dl><dt>Barack Obama</dt> <dd>On April 17, 2018, American actor <a href="/wiki/Jordan_Peele" title="Jordan Peele">Jordan Peele</a>, <a href="/wiki/BuzzFeed" title="BuzzFeed">BuzzFeed</a>, and <a href="/wiki/Monkeypaw_Productions" title="Monkeypaw Productions">Monkeypaw Productions</a> posted a deepfake of Barack Obama to YouTube, which depicted Barack Obama cursing and calling Donald Trump names.<sup id="cite_ref-185" class="reference"><a href="#cite_note-185"><span class="cite-bracket">&#91;</span>185<span class="cite-bracket">&#93;</span></a></sup> In this deepfake, Peele's voice and face were transformed and manipulated into those of Obama. The intent of this video was to portray the dangerous consequences and power of deepfakes, and how deepfakes can make anyone say anything.</dd> <dt>Donald Trump</dt> <dd>On May 5, 2019, Derpfakes posted a deepfake of <a href="/wiki/Donald_Trump" title="Donald Trump">Donald Trump</a> to YouTube, based on a skit <a href="/wiki/Jimmy_Fallon" title="Jimmy Fallon">Jimmy Fallon</a> performed on <i><a href="/wiki/The_Tonight_Show_Starring_Jimmy_Fallon" title="The Tonight Show Starring Jimmy Fallon">The Tonight Show</a></i>.<sup id="cite_ref-Parkin-2019_186-0" class="reference"><a href="#cite_note-Parkin-2019-186"><span class="cite-bracket">&#91;</span>186<span class="cite-bracket">&#93;</span></a></sup> In the original skit (aired May 4, 2016), Jimmy Fallon dressed as Donald Trump and pretended to participate in a phone call with Barack Obama, conversing in a manner that presented him to be bragging about his primary win in Indiana.<sup id="cite_ref-Parkin-2019_186-1" class="reference"><a href="#cite_note-Parkin-2019-186"><span 
class="cite-bracket">&#91;</span>186<span class="cite-bracket">&#93;</span></a></sup> In the deepfake, Jimmy Fallon's face was transformed into Donald Trump's face, with the audio remaining the same. This deepfake video was produced by Derpfakes with a comedic intent. In March 2023, a series of images appeared to show New York Police Department officers restraining Trump.<sup id="cite_ref-187" class="reference"><a href="#cite_note-187"><span class="cite-bracket">&#91;</span>187<span class="cite-bracket">&#93;</span></a></sup> The images, created using <a href="/wiki/Midjourney" title="Midjourney">Midjourney</a>, were initially posted on <a href="/wiki/Twitter" title="Twitter">Twitter</a> by <a href="/wiki/Eliot_Higgins" title="Eliot Higgins">Eliot Higgins</a> but were later re-shared without context, leading some viewers to believe they were real photographs.<sup id="cite_ref-AP_News_184-1" class="reference"><a href="#cite_note-AP_News-184"><span class="cite-bracket">&#91;</span>184<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Nancy Pelosi</dt> <dd>In 2019, a clip from <a href="/wiki/Nancy_Pelosi" title="Nancy Pelosi">Nancy Pelosi</a>'s speech at the <a href="/wiki/Center_for_American_Progress" title="Center for American Progress">Center for American Progress</a> (given on May 22, 2019) in which the video was slowed down, in addition to the pitch of the audio being altered, to make it seem as if she were drunk, was widely distributed on social media. 
Critics argue that this was not a deepfake, but a <span class="nowrap">shallowfake<span style="visibility:hidden; color:transparent; padding-left:2px">&#8205;</span>—<span style="visibility:hidden; color:transparent; padding-left:2px">&#8205;</span></span>a less sophisticated form of video manipulation.<sup id="cite_ref-188" class="reference"><a href="#cite_note-188"><span class="cite-bracket">&#91;</span>188<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-189" class="reference"><a href="#cite_note-189"><span class="cite-bracket">&#91;</span>189<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Mark Zuckerberg</dt> <dd>In May 2019, two artists collaborating with the company CannyAI created a deepfake video of Facebook founder <a href="/wiki/Mark_Zuckerberg" title="Mark Zuckerberg">Mark Zuckerberg</a> talking about harvesting and controlling data from billions of people. The video was part of an exhibit to educate the public about the dangers of <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a>.<sup id="cite_ref-190" class="reference"><a href="#cite_note-190"><span class="cite-bracket">&#91;</span>190<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-191" class="reference"><a href="#cite_note-191"><span class="cite-bracket">&#91;</span>191<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Kim Jong-un and Vladimir Putin</dt> <dd>On September 29, 2020, deepfakes of <a href="/wiki/North_Korea" title="North Korea">North Korean</a> leader <a href="/wiki/Kim_Jong-un" class="mw-redirect" title="Kim Jong-un">Kim Jong-un</a> and <a href="/wiki/Russia" title="Russia">Russian</a> President <a href="/wiki/Vladimir_Putin" title="Vladimir Putin">Vladimir Putin</a> were uploaded to YouTube, created by a nonpartisan advocacy group <a href="/wiki/RepresentUs" title="RepresentUs">RepresentUs</a>.<sup id="cite_ref-Hao-2020_192-0" class="reference"><a href="#cite_note-Hao-2020-192"><span 
class="cite-bracket">&#91;</span>192<span class="cite-bracket">&#93;</span></a></sup> The deepfakes of Kim and Putin were meant to air publicly as commercials to relay the notion that interference by these leaders in US elections would be detrimental to the United States' democracy. The commercials also aimed to shock Americans to realize how fragile democracy is, and how media and news can significantly influence the country's path regardless of credibility.<sup id="cite_ref-Hao-2020_192-1" class="reference"><a href="#cite_note-Hao-2020-192"><span class="cite-bracket">&#91;</span>192<span class="cite-bracket">&#93;</span></a></sup> However, while the commercials included an ending comment detailing that the footage was not real, they ultimately did not air due to fears and sensitivity regarding how Americans may react.<sup id="cite_ref-Hao-2020_192-2" class="reference"><a href="#cite_note-Hao-2020-192"><span class="cite-bracket">&#91;</span>192<span class="cite-bracket">&#93;</span></a></sup> On June 5, 2023, an unknown source broadcast a reported deepfake of Vladimir Putin on multiple radio and television networks. 
In the clip, Putin appears to deliver a speech announcing the invasion of Russia and calling for a general mobilization of the army.<sup id="cite_ref-NYT1_193-0" class="reference"><a href="#cite_note-NYT1-193"><span class="cite-bracket">&#91;</span>193<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Volodymyr Zelenskyy</dt> <dd>On March 16, 2022, a one-minute long deepfake video depicting Ukraine's president <a href="/wiki/Volodymyr_Zelenskyy" title="Volodymyr Zelenskyy">Volodymyr Zelenskyy</a> seemingly telling his soldiers to lay down their arms and surrender during the <a href="/wiki/2022_Russian_invasion_of_Ukraine" class="mw-redirect" title="2022 Russian invasion of Ukraine">2022 Russian invasion of Ukraine</a> was circulated on social media.<sup id="cite_ref-Pawelec_166-1" class="reference"><a href="#cite_note-Pawelec-166"><span class="cite-bracket">&#91;</span>166<span class="cite-bracket">&#93;</span></a></sup> Russian social media boosted it, but after it was debunked, Facebook and YouTube removed it. Twitter allowed the video in tweets where it was exposed as a fake, but said it would be taken down if posted to deceive people. Hackers inserted the disinformation into a live scrolling-text news crawl on TV station Ukraine 24, and the video appeared briefly on the station's website in addition to false claims that Zelenskyy had fled his country's capital, <a href="/wiki/Kyiv" title="Kyiv">Kyiv</a>. It was not immediately clear who created the deepfake, to which Zelenskyy responded with his own video, saying, "We don't plan to lay down any arms. Until our victory."<sup id="cite_ref-194" class="reference"><a href="#cite_note-194"><span class="cite-bracket">&#91;</span>194<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Wolf News</dt> <dd>In late 2022, pro-China propagandists started spreading deepfake videos purporting to be from "Wolf News" that used synthetic actors. 
The technology was developed by a London company called Synthesia, which markets it as a cheap alternative to live actors for training and HR videos.<sup id="cite_ref-195" class="reference"><a href="#cite_note-195"><span class="cite-bracket">&#91;</span>195<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Pope Francis</dt> <dd>In March 2023, an anonymous construction worker from Chicago used <a href="/wiki/Midjourney" title="Midjourney">Midjourney</a> to create a fake image of <a href="/wiki/Pope_Francis" title="Pope Francis">Pope Francis</a> in a white <a href="/wiki/Balenciaga" title="Balenciaga">Balenciaga</a> <a href="/wiki/Puffer_jacket" class="mw-redirect" title="Puffer jacket">puffer jacket</a>. The image went viral, receiving over twenty million views.<sup id="cite_ref-196" class="reference"><a href="#cite_note-196"><span class="cite-bracket">&#91;</span>196<span class="cite-bracket">&#93;</span></a></sup> Writer <a href="/wiki/Ryan_Broderick" title="Ryan Broderick">Ryan Broderick</a> dubbed it "the first real mass-level AI misinformation case".<sup id="cite_ref-197" class="reference"><a href="#cite_note-197"><span class="cite-bracket">&#91;</span>197<span class="cite-bracket">&#93;</span></a></sup> Experts consulted by <i><a href="/wiki/Slate_(magazine)" title="Slate (magazine)">Slate</a></i> characterized the image as unsophisticated: "you could have made it on Photoshop five years ago".<sup id="cite_ref-198" class="reference"><a href="#cite_note-198"><span class="cite-bracket">&#91;</span>198<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Keir Starmer</dt> <dd>In October 2023, a deepfake audio clip of the UK <a href="/wiki/Labour_Party_(UK)" title="Labour Party (UK)">Labour</a> Party leader Keir Starmer abusing staffers was released on the first day of a Labour Party conference. 
The clip purported to be an audio tape of Starmer abusing his staffers.<sup id="cite_ref-199" class="reference"><a href="#cite_note-199"><span class="cite-bracket">&#91;</span>199<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Rashmika Mandanna</dt> <dd>In early November 2023, a famous <a href="/wiki/Cinema_of_South_India" title="Cinema of South India">South Indian actor</a>, <a href="/wiki/Rashmika_Mandanna" title="Rashmika Mandanna">Rashmika Mandanna</a>, fell prey to a deepfake when a morphed video of a famous British-Indian influencer, Zara Patel, with Rashmika's face began to circulate on social media. Zara Patel said she was not involved in its creation.<sup id="cite_ref-200" class="reference"><a href="#cite_note-200"><span class="cite-bracket">&#91;</span>200<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Bongbong Marcos</dt> <dd>In April 2024, a deepfake video misrepresenting Philippine President <a href="/wiki/Bongbong_Marcos" title="Bongbong Marcos">Bongbong Marcos</a> was released. It is a slideshow accompanied by a deepfake audio of Marcos purportedly ordering the <a href="/wiki/Armed_Forces_of_the_Philippines" title="Armed Forces of the Philippines">Armed Forces of the Philippines</a> and special task force to act "however appropriate" should <a href="/wiki/China" title="China">China</a> attack the Philippines. 
The video was released amidst tensions related to the <a href="/wiki/South_China_Sea_dispute" class="mw-redirect" title="South China Sea dispute">South China Sea dispute</a>.<sup id="cite_ref-201" class="reference"><a href="#cite_note-201"><span class="cite-bracket">&#91;</span>201<span class="cite-bracket">&#93;</span></a></sup> The <a href="/wiki/Presidential_Communications_Group" title="Presidential Communications Group">Presidential Communications Office</a> has said that there is no such directive from the president and said a foreign actor might be behind the fabricated media.<sup id="cite_ref-202" class="reference"><a href="#cite_note-202"><span class="cite-bracket">&#91;</span>202<span class="cite-bracket">&#93;</span></a></sup> Criminal charges have been filed by the <a href="/wiki/Kapisanan_ng_mga_Brodkaster_ng_Pilipinas" title="Kapisanan ng mga Brodkaster ng Pilipinas">Kapisanan ng mga Brodkaster ng Pilipinas</a> in relation to the deepfake media.<sup id="cite_ref-203" class="reference"><a href="#cite_note-203"><span class="cite-bracket">&#91;</span>203<span class="cite-bracket">&#93;</span></a></sup> On July 22, 2024, <a href="/wiki/Polvoron_video" title="Polvoron video">a video</a> of Marcos purportedly snorting illegal drugs was released by Claire Contreras, a former supporter of Marcos. 
Dubbed as the <a href="/wiki/Polvoron_video" title="Polvoron video"><i>polvoron</i> video</a>, the media noted its consistency with the insinuation of Marcos' predecessor—<a href="/wiki/Rodrigo_Duterte" title="Rodrigo Duterte">Rodrigo Duterte</a>—that Marcos is a drug addict; the video was also shown at a <i>Hakbang ng Maisug</i> rally organized by people aligned with Duterte.<sup id="cite_ref-204" class="reference"><a href="#cite_note-204"><span class="cite-bracket">&#91;</span>204<span class="cite-bracket">&#93;</span></a></sup> Two days later, the <a href="/wiki/Philippine_National_Police" title="Philippine National Police">Philippine National Police</a> and the <a href="/wiki/National_Bureau_of_Investigation_(Philippines)" title="National Bureau of Investigation (Philippines)">National Bureau of Investigation</a>, based on their own findings, concluded that the video was created using AI; they further pointed out inconsistencies with the person on the video with Marcos, such as details on the two people's ears.<sup id="cite_ref-205" class="reference"><a href="#cite_note-205"><span class="cite-bracket">&#91;</span>205<span class="cite-bracket">&#93;</span></a></sup></dd> <dt>Joe Biden</dt> <dd>Prior to the <a href="/wiki/2024_United_States_presidential_election" title="2024 United States presidential election">2024 United States presidential election</a>, phone calls imitating the voice of the incumbent <a href="/wiki/Joe_Biden" title="Joe Biden">Joe Biden</a> were made to dissuade people from voting for him. The person responsible for the calls was charged with <a href="/wiki/Voter_suppression" title="Voter suppression">voter suppression</a> and impersonating a candidate. 
The FCC proposed to fine him <a href="/wiki/US$" class="mw-redirect" title="US$">US$</a>6&#160;million and <a href="/wiki/Lingo_(VoIP_Service_operator)" title="Lingo (VoIP Service operator)">Lingo Telecom</a>, the company that allegedly relayed the calls, $2&#160;million.<sup id="cite_ref-206" class="reference"><a href="#cite_note-206"><span class="cite-bracket">&#91;</span>206<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-207" class="reference"><a href="#cite_note-207"><span class="cite-bracket">&#91;</span>207<span class="cite-bracket">&#93;</span></a></sup></dd></dl> <div class="mw-heading mw-heading2"><h2 id="Responses">Responses</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=30" title="Edit section: Responses"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="mw-heading mw-heading3"><h3 id="Social_media_platforms">Social media platforms</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=31" title="Edit section: Social media platforms"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><a href="/wiki/Twitter" title="Twitter">Twitter</a> (later <a href="/wiki/X_(Twitter)" class="mw-redirect" title="X (Twitter)">X</a>) is taking active measures to handle synthetic and manipulated media on their platform. 
In order to prevent disinformation from spreading, Twitter is placing a notice on tweets that contain manipulated media and/or deepfakes that signal to viewers that the media is manipulated.<sup id="cite_ref-blog.twitter-2020_208-0" class="reference"><a href="#cite_note-blog.twitter-2020-208"><span class="cite-bracket">&#91;</span>208<span class="cite-bracket">&#93;</span></a></sup> There will also be a warning that appears to users who plan on retweeting, liking, or engaging with the tweet.<sup id="cite_ref-blog.twitter-2020_208-1" class="reference"><a href="#cite_note-blog.twitter-2020-208"><span class="cite-bracket">&#91;</span>208<span class="cite-bracket">&#93;</span></a></sup> Twitter will also work to provide users a link next to the tweet containing manipulated or synthetic media that links to a Twitter Moment or credible news article on the related topic—as a debunking action.<sup id="cite_ref-blog.twitter-2020_208-2" class="reference"><a href="#cite_note-blog.twitter-2020-208"><span class="cite-bracket">&#91;</span>208<span class="cite-bracket">&#93;</span></a></sup> Twitter also has the ability to remove any tweets containing deepfakes or manipulated media that may pose a harm to users' safety.<sup id="cite_ref-blog.twitter-2020_208-3" class="reference"><a href="#cite_note-blog.twitter-2020-208"><span class="cite-bracket">&#91;</span>208<span class="cite-bracket">&#93;</span></a></sup> In order to better improve Twitter's detection of deepfakes and manipulated media, Twitter asked users who are interested in partnering with them to work on deepfake detection solutions to fill out a form.<sup id="cite_ref-209" class="reference"><a href="#cite_note-209"><span class="cite-bracket">&#91;</span>209<span class="cite-bracket">&#93;</span></a></sup> </p><p>In August 2024, the <a href="/wiki/Secretary_of_state" title="Secretary of state">secretaries of state</a> of Minnesota, Pennsylvania, Washington, Michigan and New Mexico penned an open letter to X owner Elon 
Musk urging modifications to its AI chatbot <a href="/wiki/Grok_(chatbot)" title="Grok (chatbot)">Grok</a>'s new <a href="/wiki/Text-to-video" class="mw-redirect" title="Text-to-video">text-to-video</a> generator, added in August 2024, stating that it had disseminated election misinformation.<sup id="cite_ref-210" class="reference"><a href="#cite_note-210"><span class="cite-bracket">&#91;</span>210<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-211" class="reference"><a href="#cite_note-211"><span class="cite-bracket">&#91;</span>211<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-212" class="reference"><a href="#cite_note-212"><span class="cite-bracket">&#91;</span>212<span class="cite-bracket">&#93;</span></a></sup> </p><p><a href="/wiki/Facebook" title="Facebook">Facebook</a> has taken efforts towards encouraging the creation of deepfakes in order to develop state of the art deepfake detection software. Facebook was the prominent partner in hosting the Deepfake Detection Challenge (DFDC), held December 2019, to 2114 participants who generated more than 35,000 models.<sup id="cite_ref-Ferrer-2020_213-0" class="reference"><a href="#cite_note-Ferrer-2020-213"><span class="cite-bracket">&#91;</span>213<span class="cite-bracket">&#93;</span></a></sup> The top performing models with the highest detection accuracy were analyzed for similarities and differences; these findings are areas of interest in further research to improve and refine deepfake detection models.<sup id="cite_ref-Ferrer-2020_213-1" class="reference"><a href="#cite_note-Ferrer-2020-213"><span class="cite-bracket">&#91;</span>213<span class="cite-bracket">&#93;</span></a></sup> Facebook has also detailed that the platform will be taking down media generated with artificial intelligence used to alter an individual's speech.<sup id="cite_ref-Paul-2020_214-0" class="reference"><a href="#cite_note-Paul-2020-214"><span class="cite-bracket">&#91;</span>214<span 
class="cite-bracket">&#93;</span></a></sup> However, media that has been edited to alter the order or context of words in one's message would remain on the site but be labeled as false, since it was not generated by artificial intelligence.<sup id="cite_ref-Paul-2020_214-1" class="reference"><a href="#cite_note-Paul-2020-214"><span class="cite-bracket">&#91;</span>214<span class="cite-bracket">&#93;</span></a></sup> </p><p>On 31 January 2018, <a href="/wiki/Gfycat" title="Gfycat">Gfycat</a> began removing all deepfakes from its site.<sup id="cite_ref-Cole-2018e_215-0" class="reference"><a href="#cite_note-Cole-2018e-215"><span class="cite-bracket">&#91;</span>215<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Ghoshal-2018_216-0" class="reference"><a href="#cite_note-Ghoshal-2018-216"><span class="cite-bracket">&#91;</span>216<span class="cite-bracket">&#93;</span></a></sup> On <a href="/wiki/Reddit" title="Reddit">Reddit</a>, the r/deepfakes subreddit was banned on 7 February 2018, due to the policy violation of "involuntary pornography".<sup id="cite_ref-217" class="reference"><a href="#cite_note-217"><span class="cite-bracket">&#91;</span>217<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-218" class="reference"><a href="#cite_note-218"><span class="cite-bracket">&#91;</span>218<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-219" class="reference"><a href="#cite_note-219"><span class="cite-bracket">&#91;</span>219<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-220" class="reference"><a href="#cite_note-220"><span class="cite-bracket">&#91;</span>220<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-221" class="reference"><a href="#cite_note-221"><span class="cite-bracket">&#91;</span>221<span class="cite-bracket">&#93;</span></a></sup> In the same month, representatives from <a href="/wiki/Twitter" title="Twitter">Twitter</a> stated that they would suspend accounts 
suspected of posting non-consensual deepfake content.<sup id="cite_ref-Cole-2018a_222-0" class="reference"><a href="#cite_note-Cole-2018a-222"><span class="cite-bracket">&#91;</span>222<span class="cite-bracket">&#93;</span></a></sup> Chat site <a href="/wiki/Discord_(software)" class="mw-redirect" title="Discord (software)">Discord</a> has taken action against deepfakes in the past,<sup id="cite_ref-223" class="reference"><a href="#cite_note-223"><span class="cite-bracket">&#91;</span>223<span class="cite-bracket">&#93;</span></a></sup> and has taken a general stance against deepfakes.<sup id="cite_ref-Ghoshal-2018_216-1" class="reference"><a href="#cite_note-Ghoshal-2018-216"><span class="cite-bracket">&#91;</span>216<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-224" class="reference"><a href="#cite_note-224"><span class="cite-bracket">&#91;</span>224<span class="cite-bracket">&#93;</span></a></sup> In September 2018, <a href="/wiki/Google" title="Google">Google</a> added "involuntary synthetic pornographic imagery" to its ban list, allowing anyone to request the block of results showing their fake nudes.<sup id="cite_ref-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;_225-0" class="reference"><a href="#cite_note-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;-225"><span class="cite-bracket">&#91;</span>225<span class="cite-bracket">&#93;</span></a></sup><sup class="noprint Inline-Template" style="white-space:nowrap;">&#91;<i><a href="/wiki/Wikipedia:Manual_of_Style#Punctuation" title="Wikipedia:Manual of Style"><span title="Quotation marks may be incomplete, unbalanced or improperly applied.">check quotation syntax</span></a></i>&#93;</sup> </p><p>In February 2018, <a href="/wiki/Pornhub" title="Pornhub">Pornhub</a> said that it would ban deepfake videos on its website because it is considered "non consensual content" 
which violates their terms of service.<sup id="cite_ref-Cole-2018c_226-0" class="reference"><a href="#cite_note-Cole-2018c-226"><span class="cite-bracket">&#91;</span>226<span class="cite-bracket">&#93;</span></a></sup> They also stated previously to Mashable that they will take down content flagged as deepfakes.<sup id="cite_ref-227" class="reference"><a href="#cite_note-227"><span class="cite-bracket">&#91;</span>227<span class="cite-bracket">&#93;</span></a></sup> Writers from Motherboard reported that searching "deepfakes" on <a href="/wiki/Pornhub" title="Pornhub">Pornhub</a> still returned multiple recent deepfake videos.<sup id="cite_ref-Cole-2018c_226-1" class="reference"><a href="#cite_note-Cole-2018c-226"><span class="cite-bracket">&#91;</span>226<span class="cite-bracket">&#93;</span></a></sup> </p><p><a href="/wiki/Facebook" title="Facebook">Facebook</a> has previously stated that they would not remove deepfakes from their platforms.<sup id="cite_ref-Jee-2019_228-0" class="reference"><a href="#cite_note-Jee-2019-228"><span class="cite-bracket">&#91;</span>228<span class="cite-bracket">&#93;</span></a></sup> The videos will instead be flagged as fake by third-parties and then have a lessened priority in user's feeds.<sup id="cite_ref-Cole-2019b_229-0" class="reference"><a href="#cite_note-Cole-2019b-229"><span class="cite-bracket">&#91;</span>229<span class="cite-bracket">&#93;</span></a></sup> This response was prompted in June 2019 after a deepfake featuring a 2016 video of <a href="/wiki/Mark_Zuckerberg" title="Mark Zuckerberg">Mark Zuckerberg</a> circulated on Facebook and <a href="/wiki/Instagram" title="Instagram">Instagram</a>.<sup id="cite_ref-Jee-2019_228-1" class="reference"><a href="#cite_note-Jee-2019-228"><span class="cite-bracket">&#91;</span>228<span class="cite-bracket">&#93;</span></a></sup> </p><p>In May 2022, <a href="/wiki/Google" title="Google">Google</a> officially changed the terms of service for their <a 
href="/wiki/Project_Jupyter#Industry_adoption" title="Project Jupyter">Jupyter Notebook colabs</a>, banning the use of their colab service for the purpose of creating deepfakes.<sup id="cite_ref-230" class="reference"><a href="#cite_note-230"><span class="cite-bracket">&#91;</span>230<span class="cite-bracket">&#93;</span></a></sup> This came a few days after a VICE article had been published, claiming that "most deepfakes are non-consensual porn" and that the main use of popular deepfake software DeepFaceLab (DFL), "the most important technology powering the vast majority of this generation of deepfakes" which often was used in combination with Google colabs, would be to create non-consensual pornography, by pointing to the fact that among many other well-known examples of third-party DFL implementations such as deepfakes commissioned by <a href="/wiki/The_Walt_Disney_Company" title="The Walt Disney Company">The Walt Disney Company</a>, official music videos, and web series <i><a href="/wiki/Sassy_Justice" title="Sassy Justice">Sassy Justice</a></i> by the creators of <i><a href="/wiki/South_Park" title="South Park">South Park</a></i>, DFL's <a href="/wiki/GitHub" title="GitHub">GitHub</a> page also links to deepfake porn website <span class="nowrap"><i>Mr.<span style="visibility:hidden; color:transparent; padding-left:2px">&#8205;</span>Deepfakes</i></span> and participants of the DFL Discord server also participate on <span class="nowrap"><i>Mr.<span style="visibility:hidden; color:transparent; padding-left:2px">&#8205;</span>Deepfakes</i></span>.<sup id="cite_ref-231" class="reference"><a href="#cite_note-231"><span class="cite-bracket">&#91;</span>231<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Legislation">Legislation</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=32" title="Edit section: 
Legislation"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1251242444"><table class="box-Globalize plainlinks metadata ambox ambox-content ambox-globalize" role="presentation"><tbody><tr><td class="mbox-image"><div class="mbox-image-div"><span typeof="mw:File"><span><img alt="Globe icon." src="//upload.wikimedia.org/wikipedia/commons/thumb/b/bd/Ambox_globe_content.svg/48px-Ambox_globe_content.svg.png" decoding="async" width="48" height="40" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/b/bd/Ambox_globe_content.svg/73px-Ambox_globe_content.svg.png 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/b/bd/Ambox_globe_content.svg/97px-Ambox_globe_content.svg.png 2x" data-file-width="350" data-file-height="290" /></span></span></div></td><td class="mbox-text"><div class="mbox-text-span">The examples and perspective in this section <b>may not represent a <a href="/wiki/Wikipedia:WikiProject_Countering_systemic_bias" title="Wikipedia:WikiProject Countering systemic bias">worldwide view</a> of the subject</b>.<span class="hide-when-compact"> You may <a class="external text" href="https://en.wikipedia.org/w/index.php?title=Deepfake&amp;action=edit">improve this section</a>, discuss the issue on the <a href="/wiki/Talk:Deepfake" title="Talk:Deepfake">talk page</a>, or create a new section, as appropriate.</span> <span class="date-container"><i>(<span class="date">November 2021</span>)</i></span><span class="hide-when-compact"><i> (<small><a href="/wiki/Help:Maintenance_template_removal" title="Help:Maintenance template removal">Learn how and when to remove this message</a></small>)</i></span></div></td></tr></tbody></table> <p>In the United States, there have been some responses to the problems posed by deepfakes. 
In 2018, the Malicious Deep Fake Prohibition Act was introduced to the <a href="/wiki/United_States_Senate" title="United States Senate">US Senate</a>;<sup id="cite_ref-232" class="reference"><a href="#cite_note-232"><span class="cite-bracket">&#91;</span>232<span class="cite-bracket">&#93;</span></a></sup> in 2019, the Deepfakes Accountability Act was introduced in the <a href="/wiki/116th_United_States_Congress" title="116th United States Congress">116th United States Congress</a> by <a href="/wiki/United_States_House_of_Representatives" title="United States House of Representatives">U.S. representative</a> for <a href="/wiki/New_York%27s_9th_congressional_district" title="New York&#39;s 9th congressional district">New York's 9th congressional district</a> <a href="/wiki/Yvette_Clarke" title="Yvette Clarke">Yvette Clarke</a>.<sup id="cite_ref-Clarke-2019_233-0" class="reference"><a href="#cite_note-Clarke-2019-233"><span class="cite-bracket">&#91;</span>233<span class="cite-bracket">&#93;</span></a></sup> Several states have also introduced legislation regarding deepfakes, including Virginia,<sup id="cite_ref-234" class="reference"><a href="#cite_note-234"><span class="cite-bracket">&#91;</span>234<span class="cite-bracket">&#93;</span></a></sup> Texas, California, and New York;<sup id="cite_ref-235" class="reference"><a href="#cite_note-235"><span class="cite-bracket">&#91;</span>235<span class="cite-bracket">&#93;</span></a></sup> charges as varied as <a href="/wiki/Identity_theft" title="Identity theft">identity theft</a>, <a href="/wiki/Cyberstalking" title="Cyberstalking">cyberstalking</a>, and <a href="/wiki/Revenge_porn" title="Revenge porn">revenge porn</a> have been pursued, while more comprehensive statutes are urged.<sup id="cite_ref-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;_225-1" class="reference"><a 
href="#cite_note-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;-225"><span class="cite-bracket">&#91;</span>225<span class="cite-bracket">&#93;</span></a></sup> </p><p>Among U.S. legislative efforts, on 3 October 2019, California governor <a href="/wiki/Gavin_Newsom" title="Gavin Newsom">Gavin Newsom</a> signed into law Assembly Bills No. 602 and No. 730.<sup id="cite_ref-AB602_236-0" class="reference"><a href="#cite_note-AB602-236"><span class="cite-bracket">&#91;</span>236<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-AB730_237-0" class="reference"><a href="#cite_note-AB730-237"><span class="cite-bracket">&#91;</span>237<span class="cite-bracket">&#93;</span></a></sup> Assembly Bill No. 602 provides individuals targeted by sexually explicit deepfake content made without their consent with a cause of action against the content's creator.<sup id="cite_ref-AB602_236-1" class="reference"><a href="#cite_note-AB602-236"><span class="cite-bracket">&#91;</span>236<span class="cite-bracket">&#93;</span></a></sup> Assembly Bill No. 730 prohibits the distribution of malicious deepfake audio or visual media targeting a candidate running for public office within 60 days of their election.<sup id="cite_ref-AB730_237-1" class="reference"><a href="#cite_note-AB730-237"><span class="cite-bracket">&#91;</span>237<span class="cite-bracket">&#93;</span></a></sup> U.S. representative Yvette Clarke introduced H.R. 
5586: Deepfakes Accountability Act into the <a href="/wiki/118th_United_States_Congress" title="118th United States Congress">118th United States Congress</a> on September 20, 2023 in an effort to protect national security from threats posed by deepfake technology.<sup id="cite_ref-238" class="reference"><a href="#cite_note-238"><span class="cite-bracket">&#91;</span>238<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/United_States_House_of_Representatives" title="United States House of Representatives">U.S. representative</a> <a href="/wiki/Mar%C3%ADa_Elvira_Salazar" title="María Elvira Salazar">María Salazar</a> introduced H.R. 6943: No AI Fraud Act into the <a href="/wiki/118th_United_States_Congress" title="118th United States Congress">118th United States Congress</a> on January 10, 2024, to establish specific property rights of individual physicality, including voice.<sup id="cite_ref-239" class="reference"><a href="#cite_note-239"><span class="cite-bracket">&#91;</span>239<span class="cite-bracket">&#93;</span></a></sup> </p><p>In November 2019, China announced that deepfakes and other synthetically faked footage should bear a clear notice about their fakeness starting in 2020. 
Failure to comply could be considered a <a href="/wiki/Crime" title="Crime">crime</a>, the <a href="/wiki/Cyberspace_Administration_of_China" title="Cyberspace Administration of China">Cyberspace Administration of China</a> stated on its website.<sup id="cite_ref-Reuters2019_240-0" class="reference"><a href="#cite_note-Reuters2019-240"><span class="cite-bracket">&#91;</span>240<span class="cite-bracket">&#93;</span></a></sup> The Chinese government seems to be reserving the right to prosecute both users and <a href="/wiki/Online_video_platform" title="Online video platform">online video platforms</a> failing to abide by the rules.<sup id="cite_ref-TheVerge2019_241-0" class="reference"><a href="#cite_note-TheVerge2019-241"><span class="cite-bracket">&#91;</span>241<span class="cite-bracket">&#93;</span></a></sup> The Cyberspace Administration of China, the <a href="/wiki/Ministry_of_Industry_and_Information_Technology" title="Ministry of Industry and Information Technology">Ministry of Industry and Information Technology</a>, and the <a href="/wiki/Ministry_of_Public_Security_(China)" title="Ministry of Public Security (China)">Ministry of Public Security</a> jointly issued the Provision on the Administration of Deep Synthesis Internet Information Service in November 2022.<sup id="cite_ref-242" class="reference"><a href="#cite_note-242"><span class="cite-bracket">&#91;</span>242<span class="cite-bracket">&#93;</span></a></sup> China's updated Deep Synthesis Provisions (Administrative Provisions on Deep Synthesis in Internet-Based Information Services) went into effect in January 2023.<sup id="cite_ref-243" class="reference"><a href="#cite_note-243"><span class="cite-bracket">&#91;</span>243<span class="cite-bracket">&#93;</span></a></sup> </p><p>In the United Kingdom, producers of deepfake material could be prosecuted for harassment, but deepfake production was not a specific crime<sup id="cite_ref-244" class="reference"><a href="#cite_note-244"><span
class="cite-bracket">&#91;</span>244<span class="cite-bracket">&#93;</span></a></sup> until 2023, when the <a href="/wiki/Online_Safety_Act_2023" title="Online Safety Act 2023">Online Safety Act</a> was passed, which made deepfakes illegal; the UK plans to expand the Act's scope to criminalize deepfakes created with "intention to cause distress" in 2024.<sup id="cite_ref-245" class="reference"><a href="#cite_note-245"><span class="cite-bracket">&#91;</span>245<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-246" class="reference"><a href="#cite_note-246"><span class="cite-bracket">&#91;</span>246<span class="cite-bracket">&#93;</span></a></sup> </p><p>In Canada, in 2019, the <a href="/wiki/Communications_Security_Establishment" title="Communications Security Establishment">Communications Security Establishment</a> released a report which said that deepfakes could be used to interfere in Canadian politics, particularly to discredit politicians and influence voters.<sup id="cite_ref-247" class="reference"><a href="#cite_note-247"><span class="cite-bracket">&#91;</span>247<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-248" class="reference"><a href="#cite_note-248"><span class="cite-bracket">&#91;</span>248<span class="cite-bracket">&#93;</span></a></sup> As a result, there are multiple ways for citizens in Canada to deal with deepfakes if they are targeted by them.<sup id="cite_ref-249" class="reference"><a href="#cite_note-249"><span class="cite-bracket">&#91;</span>249<span class="cite-bracket">&#93;</span></a></sup> In February 2024, <a href="/wiki/Bill_(law)" title="Bill (law)">bill</a> C-63 was tabled in the <a href="/wiki/44th_Canadian_Parliament" title="44th Canadian Parliament">44th Canadian Parliament</a> in order to enact the <a href="/wiki/Online_Harms_Act" title="Online Harms Act">Online Harms Act</a>, which would amend the Criminal Code, and other Acts.
An earlier version of the Bill, C-36, was ended by the dissolution of the 43rd Canadian Parliament in September 2021.<sup id="cite_ref-250" class="reference"><a href="#cite_note-250"><span class="cite-bracket">&#91;</span>250<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-251" class="reference"><a href="#cite_note-251"><span class="cite-bracket">&#91;</span>251<span class="cite-bracket">&#93;</span></a></sup> </p><p>In India, there are no direct laws or regulation on AI or deepfakes, but there are provisions under the Indian Penal Code and Information Technology Act 2000/2008, which can be looked at for legal remedies, and the new proposed Digital India Act will have a chapter on AI and deepfakes in particular, as per the MoS Rajeev Chandrasekhar.<sup id="cite_ref-252" class="reference"><a href="#cite_note-252"><span class="cite-bracket">&#91;</span>252<span class="cite-bracket">&#93;</span></a></sup> </p><p>In Europe, the European Union's 2024 <a href="/wiki/Artificial_Intelligence_Act" title="Artificial Intelligence Act">Artificial Intelligence Act</a> (AI Act) takes a risk-based approach to regulating AI systems, including deepfakes. It establishes categories of "unacceptable risk," "high risk," "specific/limited or transparency risk", and "minimal risk" to determine the level of regulatory obligations for AI providers and users. However, the lack of clear definitions for these risk categories in the context of deepfakes creates potential challenges for effective implementation. Legal scholars have raised concerns about the classification of deepfakes intended for political misinformation or the creation of non-consensual intimate imagery. 
Debate exists over whether such uses should always be considered "high-risk" AI systems, which would lead to stricter regulatory requirements.<sup id="cite_ref-253" class="reference"><a href="#cite_note-253"><span class="cite-bracket">&#91;</span>253<span class="cite-bracket">&#93;</span></a></sup> </p><p>In August 2024, the Irish <a href="/wiki/Data_Protection_Commission" class="mw-redirect" title="Data Protection Commission">Data Protection Commission</a> (DPC) launched court proceedings against <a href="/wiki/X_(Twitter)" class="mw-redirect" title="X (Twitter)">X</a> for its unlawful use of the personal data of over 60 million EU/EEA users, in order to train its AI technologies, such as its <a href="/wiki/Chatbot" title="Chatbot">chatbot</a> <a href="/wiki/Grok_(chatbot)" title="Grok (chatbot)">Grok</a>.<sup id="cite_ref-254" class="reference"><a href="#cite_note-254"><span class="cite-bracket">&#91;</span>254<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Response_from_DARPA">Response from DARPA</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=33" title="Edit section: Response from DARPA"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>In 2016, the <a href="/wiki/Defense_Advanced_Research_Projects_Agency" class="mw-redirect" title="Defense Advanced Research Projects Agency">Defense Advanced Research Projects Agency</a> (DARPA) launched the Media Forensics (MediFor) program which was funded through 2020.<sup id="cite_ref-255" class="reference"><a href="#cite_note-255"><span class="cite-bracket">&#91;</span>255<span class="cite-bracket">&#93;</span></a></sup> MediFor aimed at automatically spotting digital manipulation in images and videos, including <a href="/wiki/Deepfakes" class="mw-redirect" title="Deepfakes">Deepfakes</a>.<sup id="cite_ref-256" class="reference"><a 
href="#cite_note-256"><span class="cite-bracket">&#91;</span>256<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Corvey-2020_257-0" class="reference"><a href="#cite_note-Corvey-2020-257"><span class="cite-bracket">&#91;</span>257<span class="cite-bracket">&#93;</span></a></sup> In the summer of 2018, MediFor held an event where individuals competed to create AI-generated videos, audio, and images as well as automated tools to detect these deepfakes.<sup id="cite_ref-258" class="reference"><a href="#cite_note-258"><span class="cite-bracket">&#91;</span>258<span class="cite-bracket">&#93;</span></a></sup> According to the MediFor program, it established a framework of three tiers of information - digital integrity, physical integrity and semantic integrity - to generate one integrity score in an effort to enable accurate detection of manipulated media.<sup id="cite_ref-259" class="reference"><a href="#cite_note-259"><span class="cite-bracket">&#91;</span>259<span class="cite-bracket">&#93;</span></a></sup> </p><p>In 2019, DARPA hosted a "proposers day" for the Semantic Forensics (SemaFor) program where researchers were driven to prevent viral spread of AI-manipulated media.<sup id="cite_ref-Corrigan-2019_260-0" class="reference"><a href="#cite_note-Corrigan-2019-260"><span class="cite-bracket">&#91;</span>260<span class="cite-bracket">&#93;</span></a></sup> DARPA and the Semantic Forensics Program were also working together to detect AI-manipulated media through efforts in training computers to utilize common sense, logical reasoning.<sup id="cite_ref-Corrigan-2019_260-1" class="reference"><a href="#cite_note-Corrigan-2019-260"><span class="cite-bracket">&#91;</span>260<span class="cite-bracket">&#93;</span></a></sup> Built on the MediFor's technologies, SemaFor's attribution algorithms infer if digital media originates from a particular organization or individual, while characterization algorithms determine whether media was generated or 
manipulated for malicious purposes.<sup id="cite_ref-261" class="reference"><a href="#cite_note-261"><span class="cite-bracket">&#91;</span>261<span class="cite-bracket">&#93;</span></a></sup> In March 2024, SemaFor published an analytic catalog that offers the public access to open-source resources developed under SemaFor.<sup id="cite_ref-262" class="reference"><a href="#cite_note-262"><span class="cite-bracket">&#91;</span>262<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-263" class="reference"><a href="#cite_note-263"><span class="cite-bracket">&#91;</span>263<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="International_Panel_on_the_Information_Environment">International Panel on the Information Environment</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=34" title="Edit section: International Panel on the Information Environment"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>The <a href="/wiki/International_Panel_on_the_Information_Environment" title="International Panel on the Information Environment">International Panel on the Information Environment</a> was launched in 2023 as a consortium of over 250 scientists working to develop effective countermeasures to deepfakes and other problems created by perverse incentives in organizations disseminating information via the Internet.<sup id="cite_ref-264" class="reference"><a href="#cite_note-264"><span class="cite-bracket">&#91;</span>264<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="In_popular_culture">In popular culture</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=35" title="Edit section: In popular culture"><span>edit</span></a><span 
class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1251242444"><table class="box-In_popular_culture plainlinks metadata ambox ambox-content" role="presentation"><tbody><tr><td class="mbox-image"><div class="mbox-image-div"><span typeof="mw:File"><span><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/b/b4/Ambox_important.svg/40px-Ambox_important.svg.png" decoding="async" width="40" height="40" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/b/b4/Ambox_important.svg/60px-Ambox_important.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/b/b4/Ambox_important.svg/80px-Ambox_important.svg.png 2x" data-file-width="40" data-file-height="40" /></span></span></div></td><td class="mbox-text"><div class="mbox-text-span">This article <b>may contain <a href="/wiki/Wikipedia:What_Wikipedia_is_not#Wikipedia_is_not_an_indiscriminate_collection_of_information" title="Wikipedia:What Wikipedia is not">irrelevant</a> references to <a href="/wiki/Wikipedia:Manual_of_Style/Trivia_sections#&quot;In_popular_culture&quot;_and_&quot;Cultural_references&quot;_material" title="Wikipedia:Manual of Style/Trivia sections">popular culture</a></b>.<span class="hide-when-compact"> Please help Wikipedia to <a class="external text" href="https://en.wikipedia.org/w/index.php?title=Deepfake&amp;action=edit">improve this article</a> by removing the content or adding <a href="/wiki/Wikipedia:Citing_sources" title="Wikipedia:Citing sources">citations</a> to <a href="/wiki/Wikipedia:Reliable_sources" title="Wikipedia:Reliable sources">reliable</a> and <a href="/wiki/Wikipedia:Independent_sources" title="Wikipedia:Independent sources">independent sources</a>.</span> <span class="date-container"><i>(<span class="date">November 2024</span>)</i></span></div></td></tr></tbody></table> <ul><li>The 1986 mid-December issue of <i><a href="/wiki/Analog_Science_Fiction_and_Fact" title="Analog Science 
Fiction and Fact">Analog</a></i> magazine published the novelette "Picaper" by Jack Wodhams. Its plot revolves around digitally enhanced or digitally generated videos produced by skilled hackers serving unscrupulous lawyers and political figures.<sup id="cite_ref-1986-picaper_265-0" class="reference"><a href="#cite_note-1986-picaper-265"><span class="cite-bracket">&#91;</span>265<span class="cite-bracket">&#93;</span></a></sup></li> <li>The 1987 film <i><a href="/wiki/The_Running_Man_(1987_film)" title="The Running Man (1987 film)">The Running Man</a></i> starring <a href="/wiki/Arnold_Schwarzenegger" title="Arnold Schwarzenegger">Arnold Schwarzenegger</a> depicts an autocratic government using computers to digitally replace the faces of actors with those of wanted fugitives to make it appear the fugitives had been neutralized.</li> <li>In the 1992 techno-thriller <i><a href="/wiki/A_Philosophical_Investigation" title="A Philosophical Investigation">A Philosophical Investigation</a></i> by <a href="/wiki/Philip_Kerr" title="Philip Kerr">Philip Kerr</a>, "Wittgenstein", the main character and a serial killer, makes use of both a software similar to deepfake and a virtual reality suit for having sex with an avatar of Isadora "Jake" Jakowicz, the female police lieutenant assigned to catch him.<sup id="cite_ref-1992-API_266-0" class="reference"><a href="#cite_note-1992-API-266"><span class="cite-bracket">&#91;</span>266<span class="cite-bracket">&#93;</span></a></sup></li> <li>The 1993 film <i><a href="/wiki/Rising_Sun_(film)" title="Rising Sun (film)">Rising Sun</a></i> starring <a href="/wiki/Sean_Connery" title="Sean Connery">Sean Connery</a> and <a href="/wiki/Wesley_Snipes" title="Wesley Snipes">Wesley Snipes</a> depicts another character, Jingo Asakuma, who reveals that a computer disc has digitally altered personal identities to implicate a competitor.</li> <li>Deepfake technology is part of the plot of the 2019 <a href="/wiki/BBC_One" title="BBC One">BBC 
One</a> TV series <i><a href="/wiki/The_Capture_(TV_series)" title="The Capture (TV series)">The Capture</a></i>. The first series follows former <a href="/wiki/British_Army" title="British Army">British Army</a> sergeant Shaun Emery, who is accused of assaulting and abducting his barrister. Expertly doctored <a href="/wiki/CCTV" class="mw-redirect" title="CCTV">CCTV</a> footage is revealed to have framed him and misled the police investigating the case.<sup id="cite_ref-267" class="reference"><a href="#cite_note-267"><span class="cite-bracket">&#91;</span>267<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-268" class="reference"><a href="#cite_note-268"><span class="cite-bracket">&#91;</span>268<span class="cite-bracket">&#93;</span></a></sup> The second series follows politician Isaac Turner who discovers that another deepfake is tarnishing his reputation until the "correction" is eventually exposed to the public.</li> <li>In June 2020, <a href="/wiki/YouTube" title="YouTube">YouTube</a> deepfake artist Shamook created a deepfake of the 1994 film <i><a href="/wiki/Forrest_Gump" title="Forrest Gump">Forrest Gump</a></i> by replacing the face of beloved actor <a href="/wiki/Tom_Hanks" title="Tom Hanks">Tom Hanks</a> with <a href="/wiki/John_Travolta" title="John Travolta">John Travolta</a>'s. He created this piece using 6,000 high-quality still images of John Travolta's face from several of his films released around the same time as <i>Forrest Gump</i>.
Shamook, then, created a 180 degree facial profile that he fed into a machine learning piece of software (DeepFaceLabs), along with Tom Hanks' face from <i>Forrest Gump</i>.<sup id="cite_ref-269" class="reference"><a href="#cite_note-269"><span class="cite-bracket">&#91;</span>269<span class="cite-bracket">&#93;</span></a></sup> The humor and irony of this deepfake traces back to 2007 when John Travolta revealed he turned down the chance to play the lead role in <i>Forrest Gump</i> because he had said yes to <i><a href="/wiki/Pulp_Fiction" title="Pulp Fiction">Pulp Fiction</a></i> instead.<sup id="cite_ref-270" class="reference"><a href="#cite_note-270"><span class="cite-bracket">&#91;</span>270<span class="cite-bracket">&#93;</span></a></sup></li> <li><i>Al Davis vs. the NFL</i>: The narrative structure of this 2021 documentary, part of <a href="/wiki/ESPN" title="ESPN">ESPN</a>'s <i><a href="/wiki/30_for_30" title="30 for 30">30 for 30</a></i> documentary series, uses deepfake versions of the film's two central characters, both deceased—<a href="/wiki/Al_Davis" title="Al Davis">Al Davis</a>, who owned the <a href="/wiki/Las_Vegas_Raiders" title="Las Vegas Raiders">Las Vegas Raiders</a> during the team's tenure in <a href="/wiki/Oakland_Raiders" title="Oakland Raiders">Oakland</a> and <a href="/wiki/Los_Angeles_Raiders" title="Los Angeles Raiders">Los Angeles</a>, and <a href="/wiki/Pete_Rozelle" title="Pete Rozelle">Pete Rozelle</a>, the <a href="/wiki/National_Football_League" title="National Football League">NFL</a> commissioner who frequently clashed with Davis.<sup id="cite_ref-271" class="reference"><a href="#cite_note-271"><span class="cite-bracket">&#91;</span>271<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-272" class="reference"><a href="#cite_note-272"><span class="cite-bracket">&#91;</span>272<span class="cite-bracket">&#93;</span></a></sup></li> <li>Deepfake technology is featured in "Impawster Syndrome", the 57th episode of the 
Canadian police series <i><a href="/wiki/Hudson_%26_Rex" title="Hudson &amp; Rex">Hudson &amp; Rex</a></i>, first broadcast on 6 January 2022, in which a member of the St. John's police team is investigated on suspicion of robbery and assault due to doctored CCTV footage using his likeness.<sup id="cite_ref-273" class="reference"><a href="#cite_note-273"><span class="cite-bracket">&#91;</span>273<span class="cite-bracket">&#93;</span></a></sup></li> <li>Using deepfake technology in his music video for his 2022 single, "<a href="/wiki/The_Heart_Part_5" title="The Heart Part 5">The Heart Part 5</a>", musician <a href="/wiki/Kendrick_Lamar" title="Kendrick Lamar">Kendrick Lamar</a> transformed into figures resembling <a href="/wiki/Nipsey_Hussle" title="Nipsey Hussle">Nipsey Hussle</a>, <a href="/wiki/O._J._Simpson" title="O. J. Simpson">O.J. Simpson</a>, and <a href="/wiki/Kanye_West" title="Kanye West">Kanye West</a>, among others.<sup id="cite_ref-Wood-2022_274-0" class="reference"><a href="#cite_note-Wood-2022-274"><span class="cite-bracket">&#91;</span>274<span class="cite-bracket">&#93;</span></a></sup> The deepfake technology in the video was created by Deep Voodoo, a studio led by <a href="/wiki/Trey_Parker" title="Trey Parker">Trey Parker</a> and <a href="/wiki/Matt_Stone" title="Matt Stone">Matt Stone</a>, who created <i><a href="/wiki/South_Park" title="South Park">South Park</a></i>.<sup id="cite_ref-Wood-2022_274-1" class="reference"><a href="#cite_note-Wood-2022-274"><span class="cite-bracket">&#91;</span>274<span class="cite-bracket">&#93;</span></a></sup></li> <li><a href="/wiki/Aloe_Blacc" title="Aloe Blacc">Aloe Blacc</a> honored his long-time collaborator <a href="/wiki/Avicii" title="Avicii">Avicii</a> four years after his death by performing their song "<a href="/wiki/Wake_Me_Up_(Avicii_song)" title="Wake Me Up (Avicii song)">Wake Me Up</a>"<sup id="cite_ref-275" class="reference"><a href="#cite_note-275"><span 
class="cite-bracket">&#91;</span>275<span class="cite-bracket">&#93;</span></a></sup> in <a href="/wiki/English_language" title="English language">English</a>, <a href="/wiki/Spanish_language" title="Spanish language">Spanish</a>, and <a href="/wiki/Mandarin_Chinese" title="Mandarin Chinese">Mandarin</a>, using deepfake technologies.<sup id="cite_ref-276" class="reference"><a href="#cite_note-276"><span class="cite-bracket">&#91;</span>276<span class="cite-bracket">&#93;</span></a></sup></li> <li>In January 2023, <a href="/wiki/ITVX" title="ITVX">ITVX</a> released the series <i>Deep Fake Neighbour Wars</i>, in which various celebrities were played by actors experiencing inane conflicts, the celebrity's face deepfaked onto them.<sup id="cite_ref-277" class="reference"><a href="#cite_note-277"><span class="cite-bracket">&#91;</span>277<span class="cite-bracket">&#93;</span></a></sup></li> <li>In October 2023, <a href="/wiki/Tom_Hanks" title="Tom Hanks">Tom Hanks</a> shared a photo of an apparent deepfake likeness depicting him promoting "some dental plan" to his <a href="/wiki/Instagram" title="Instagram">Instagram</a> page. Hanks warned his fans, "BEWARE . . . 
I have nothing to do with it."<sup id="cite_ref-:2_98-2" class="reference"><a href="#cite_note-:2-98"><span class="cite-bracket">&#91;</span>98<span class="cite-bracket">&#93;</span></a></sup></li></ul> <div class="mw-heading mw-heading2"><h2 id="See_also">See also</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=36" title="Edit section: See also"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1184024115">.mw-parser-output .div-col{margin-top:0.3em;column-width:30em}.mw-parser-output .div-col-small{font-size:90%}.mw-parser-output .div-col-rules{column-rule:1px solid #aaa}.mw-parser-output .div-col dl,.mw-parser-output .div-col ol,.mw-parser-output .div-col ul{margin-top:0}.mw-parser-output .div-col li,.mw-parser-output .div-col dd{page-break-inside:avoid;break-inside:avoid-column}</style><div class="div-col"> <ul><li><a href="/wiki/15.ai" title="15.ai">15.ai</a></li> <li><a href="/wiki/Artificial_intelligence_and_elections" title="Artificial intelligence and elections">Artificial intelligence and elections</a></li> <li><a href="/wiki/Artificial_intelligence_art" title="Artificial intelligence art">Artificial intelligence art</a></li> <li><a href="/wiki/Computer_facial_animation" title="Computer facial animation">Computer facial animation</a></li> <li><a href="/wiki/Dead_Internet_theory" title="Dead Internet theory">Dead Internet theory</a></li> <li><a href="/wiki/Digital_cloning" title="Digital cloning">Digital cloning</a></li> <li><a href="/wiki/Digital_face_replacement" title="Digital face replacement">Digital face replacement</a></li> <li><a href="/wiki/Facial_motion_capture" title="Facial motion capture">Facial motion capture</a></li> <li><a href="/wiki/Fake_nude_photography" title="Fake nude photography">Fake nude photography</a></li> <li><a href="/wiki/Fifth-generation_warfare" 
title="Fifth-generation warfare">Fifth-generation warfare</a></li> <li><a href="/wiki/Generative_artificial_intelligence" title="Generative artificial intelligence">Generative artificial intelligence</a></li> <li><a href="/wiki/Hyperreality" title="Hyperreality">Hyperreality</a></li> <li><a href="/wiki/Identity_replacement_technology" title="Identity replacement technology">Identity replacement technology</a></li> <li><a href="/wiki/Interactive_online_characters" class="mw-redirect" title="Interactive online characters">Interactive online characters</a></li> <li><a href="/wiki/Regulation_of_artificial_intelligence" title="Regulation of artificial intelligence">Regulation of artificial intelligence</a></li> <li><a href="/wiki/StyleGAN" title="StyleGAN">StyleGAN</a></li> <li><a href="/wiki/Synthetic_media" title="Synthetic media">Synthetic media</a></li> <li><a href="/wiki/Uncanny_valley" title="Uncanny valley">Uncanny valley</a></li> <li><a href="/wiki/Virtual_actor" title="Virtual actor">Virtual actor</a></li></ul> </div> <div class="mw-heading mw-heading2"><h2 id="References">References</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=37" title="Edit section: References"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1239543626">.mw-parser-output .reflist{margin-bottom:0.5em;list-style-type:decimal}@media screen{.mw-parser-output .reflist{font-size:90%}}.mw-parser-output .reflist .references{font-size:100%;margin-bottom:0;list-style-type:inherit}.mw-parser-output .reflist-columns-2{column-width:30em}.mw-parser-output .reflist-columns-3{column-width:25em}.mw-parser-output .reflist-columns{margin-top:0.3em}.mw-parser-output .reflist-columns ol{margin-top:0}.mw-parser-output .reflist-columns li{page-break-inside:avoid;break-inside:avoid-column}.mw-parser-output 
.reflist-upper-alpha{list-style-type:upper-alpha}.mw-parser-output .reflist-upper-roman{list-style-type:upper-roman}.mw-parser-output .reflist-lower-alpha{list-style-type:lower-alpha}.mw-parser-output .reflist-lower-greek{list-style-type:lower-greek}.mw-parser-output .reflist-lower-roman{list-style-type:lower-roman}</style><div class="reflist"> <div class="mw-references-wrap mw-references-columns"><ol class="references"> <li id="cite_note-FoxNews2018-1"><span class="mw-cite-backlink"><b><a href="#cite_ref-FoxNews2018_1-0">^</a></b></span> <span class="reference-text"><style data-mw-deduplicate="TemplateStyles:r1238218222">.mw-parser-output cite.citation{font-style:inherit;word-wrap:break-word}.mw-parser-output .citation q{quotes:"\"""\"""'""'"}.mw-parser-output .citation:target{background-color:rgba(0,127,255,0.133)}.mw-parser-output .id-lock-free.id-lock-free a{background:url("//upload.wikimedia.org/wikipedia/commons/6/65/Lock-green.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-limited.id-lock-limited a,.mw-parser-output .id-lock-registration.id-lock-registration a{background:url("//upload.wikimedia.org/wikipedia/commons/d/d6/Lock-gray-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-subscription.id-lock-subscription a{background:url("//upload.wikimedia.org/wikipedia/commons/a/aa/Lock-red-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .cs1-ws-icon a{background:url("//upload.wikimedia.org/wikipedia/commons/4/4c/Wikisource-logo.svg")right 0.1em center/12px no-repeat}body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-free a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-limited a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-registration a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-subscription a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .cs1-ws-icon a{background-size:contain;padding:0 1em 0 
0}.mw-parser-output .cs1-code{color:inherit;background:inherit;border:none;padding:inherit}.mw-parser-output .cs1-hidden-error{display:none;color:var(--color-error,#d33)}.mw-parser-output .cs1-visible-error{color:var(--color-error,#d33)}.mw-parser-output .cs1-maint{display:none;color:#085;margin-left:0.3em}.mw-parser-output .cs1-kern-left{padding-left:0.2em}.mw-parser-output .cs1-kern-right{padding-right:0.2em}.mw-parser-output .citation .mw-selflink{font-weight:inherit}@media screen{.mw-parser-output .cs1-format{font-size:95%}html.skin-theme-clientpref-night .mw-parser-output .cs1-maint{color:#18911f}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .cs1-maint{color:#18911f}}</style><cite id="CITEREFBrandon2018" class="citation news cs1">Brandon, John (16 February 2018). <a rel="nofollow" class="external text" href="https://www.foxnews.com/tech/terrifying-high-tech-porn-creepy-deepfake-videos-are-on-the-rise/">"Terrifying high-tech porn: Creepy 'deepfake' videos are on the rise"</a>. <i>Fox News</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20180615160819/http://www.foxnews.com/tech/2018/02/16/terrifying-high-tech-porn-creepy-deepfake-videos-are-on-rise.html">Archived</a> from the original on 15 June 2018<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 February</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Fox+News&amp;rft.atitle=Terrifying+high-tech+porn%3A+Creepy+%27deepfake%27+videos+are+on+the+rise&amp;rft.date=2018-02-16&amp;rft.aulast=Brandon&amp;rft.aufirst=John&amp;rft_id=https%3A%2F%2Fwww.foxnews.com%2Ftech%2Fterrifying-high-tech-porn-creepy-deepfake-videos-are-on-the-rise%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-2"><span class="mw-cite-backlink"><b><a href="#cite_ref-2">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKalpokasKalpokiene2022" class="citation book cs1">Kalpokas, Ignas; Kalpokiene, Julija (2022). <a rel="nofollow" class="external text" href="https://link.springer.com/book/10.1007/978-3-030-93802-4"><i>Deepfakes</i></a>. Springer Cham. pp.&#160;1–2. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-3-030-93802-4">10.1007/978-3-030-93802-4</a>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-3-030-93801-7" title="Special:BookSources/978-3-030-93801-7"><bdi>978-3-030-93801-7</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Deepfakes&amp;rft.pages=1-2&amp;rft.pub=Springer+Cham&amp;rft.date=2022&amp;rft_id=info%3Adoi%2F10.1007%2F978-3-030-93802-4&amp;rft.isbn=978-3-030-93801-7&amp;rft.aulast=Kalpokas&amp;rft.aufirst=Ignas&amp;rft.au=Kalpokiene%2C+Julija&amp;rft_id=https%3A%2F%2Flink.springer.com%2Fbook%2F10.1007%2F978-3-030-93802-4&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-3"><span class="mw-cite-backlink"><b><a href="#cite_ref-3">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJuefei-XuWangHuangGuo2022" class="citation journal cs1">Juefei-Xu, Felix; Wang, Run; Huang, Yihao; Guo, Qing; Ma, Lei; Liu, Yang (1 July 2022). <a rel="nofollow" class="external text" href="https://doi.org/10.1007/s11263-022-01606-8">"Countering Malicious DeepFakes: Survey, Battleground, and Horizon"</a>. <i>International Journal of Computer Vision</i>. <b>130</b> (7): 1678–1734. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs11263-022-01606-8">10.1007/s11263-022-01606-8</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1573-1405">1573-1405</a>. 
<a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9066404">9066404</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/35528632">35528632</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170121/https://link.springer.com/article/10.1007/s11263-022-01606-8">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">15 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=International+Journal+of+Computer+Vision&amp;rft.atitle=Countering+Malicious+DeepFakes%3A+Survey%2C+Battleground%2C+and+Horizon&amp;rft.volume=130&amp;rft.issue=7&amp;rft.pages=1678-1734&amp;rft.date=2022-07-01&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC9066404%23id-name%3DPMC&amp;rft.issn=1573-1405&amp;rft_id=info%3Apmid%2F35528632&amp;rft_id=info%3Adoi%2F10.1007%2Fs11263-022-01606-8&amp;rft.aulast=Juefei-Xu&amp;rft.aufirst=Felix&amp;rft.au=Wang%2C+Run&amp;rft.au=Huang%2C+Yihao&amp;rft.au=Guo%2C+Qing&amp;rft.au=Ma%2C+Lei&amp;rft.au=Liu%2C+Yang&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1007%2Fs11263-022-01606-8&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Kietzmann-2020-4"><span class="mw-cite-backlink">^ <a href="#cite_ref-Kietzmann-2020_4-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Kietzmann-2020_4-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKietzmannLeeMcCarthyKietzmann2020" 
class="citation journal cs1">Kietzmann, J.; Lee, L. W.; McCarthy, I. P.; Kietzmann, T. C. (2020). <a rel="nofollow" class="external text" href="https://irep.ntu.ac.uk/id/eprint/38737/1/1247050_Lee.pdf">"Deepfakes: Trick or treat?"</a> <span class="cs1-format">(PDF)</span>. <i>Business Horizons</i>. <b>63</b> (2): 135–146. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.bushor.2019.11.006">10.1016/j.bushor.2019.11.006</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:213818098">213818098</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20221229113358/https://irep.ntu.ac.uk/id/eprint/38737/1/1247050_Lee.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 29 December 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 December</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Business+Horizons&amp;rft.atitle=Deepfakes%3A+Trick+or+treat%3F&amp;rft.volume=63&amp;rft.issue=2&amp;rft.pages=135-146&amp;rft.date=2020&amp;rft_id=info%3Adoi%2F10.1016%2Fj.bushor.2019.11.006&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A213818098%23id-name%3DS2CID&amp;rft.aulast=Kietzmann&amp;rft.aufirst=J.&amp;rft.au=Lee%2C+L.+W.&amp;rft.au=McCarthy%2C+I.+P.&amp;rft.au=Kietzmann%2C+T.+C.&amp;rft_id=https%3A%2F%2Firep.ntu.ac.uk%2Fid%2Feprint%2F38737%2F1%2F1247050_Lee.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Waldrop-5"><span class="mw-cite-backlink"><b><a href="#cite_ref-Waldrop_5-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWaldrop2020" class="citation journal cs1">Waldrop, M. Mitchell (16 March 2020). <a rel="nofollow" class="external text" href="https://knowablemagazine.org/article/technology/2020/synthetic-media-real-trouble-deepfakes">"Synthetic media: The real trouble with deepfakes"</a>. <i>Knowable Magazine</i>. Annual Reviews. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1146%2Fknowable-031320-1">10.1146/knowable-031320-1</a></span>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20221119155941/https://knowablemagazine.org/article/technology/2020/synthetic-media-real-trouble-deepfakes">Archived</a> from the original on 19 November 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">19 December</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Knowable+Magazine&amp;rft.atitle=Synthetic+media%3A+The+real+trouble+with+deepfakes&amp;rft.date=2020-03-16&amp;rft_id=info%3Adoi%2F10.1146%2Fknowable-031320-1&amp;rft.aulast=Waldrop&amp;rft.aufirst=M.+Mitchell&amp;rft_id=https%3A%2F%2Fknowablemagazine.org%2Farticle%2Ftechnology%2F2020%2Fsynthetic-media-real-trouble-deepfakes&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Schwartz-6"><span class="mw-cite-backlink"><b><a href="#cite_ref-Schwartz_6-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSchwartz2018" class="citation news cs1">Schwartz, Oscar (12 November 2018). <a rel="nofollow" class="external text" href="https://www.theguardian.com/technology/2018/nov/12/deep-fakes-fake-news-truth">"You thought fake news was bad? Deep fakes are where truth goes to die"</a>. <i>The Guardian</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190616230351/https://www.theguardian.com/technology/2018/nov/12/deep-fakes-fake-news-truth">Archived</a> from the original on 16 June 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 November</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=You+thought+fake+news+was+bad%3F+Deep+fakes+are+where+truth+goes+to+die&amp;rft.date=2018-11-12&amp;rft.aulast=Schwartz&amp;rft.aufirst=Oscar&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Ftechnology%2F2018%2Fnov%2F12%2Fdeep-fakes-fake-news-truth&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Farid-7"><span class="mw-cite-backlink"><b><a href="#cite_ref-Farid_7-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFarid2019" class="citation journal cs1">Farid, Hany (15 September 2019). <a rel="nofollow" class="external text" href="https://www.annualreviews.org/doi/full/10.1146/annurev-vision-091718-014827">"Image Forensics"</a>. <i>Annual Review of Vision Science</i>. <b>5</b> (1): 549–573. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1146%2Fannurev-vision-091718-014827">10.1146/annurev-vision-091718-014827</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2374-4642">2374-4642</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/31525144">31525144</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:263558880">263558880</a>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171713/https://www.annualreviews.org/content/journals/10.1146/annurev-vision-091718-014827">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">20 September</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Annual+Review+of+Vision+Science&amp;rft.atitle=Image+Forensics&amp;rft.volume=5&amp;rft.issue=1&amp;rft.pages=549-573&amp;rft.date=2019-09-15&amp;rft.issn=2374-4642&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A263558880%23id-name%3DS2CID&amp;rft_id=info%3Apmid%2F31525144&amp;rft_id=info%3Adoi%2F10.1146%2Fannurev-vision-091718-014827&amp;rft.aulast=Farid&amp;rft.aufirst=Hany&amp;rft_id=https%3A%2F%2Fwww.annualreviews.org%2Fdoi%2Ffull%2F10.1146%2Fannurev-vision-091718-014827&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-HighSnobiety2018-8"><span class="mw-cite-backlink"><b><a href="#cite_ref-HighSnobiety2018_8-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBanks2018" class="citation news cs1">Banks, Alec (20 February 2018). <a rel="nofollow" class="external text" href="https://www.highsnobiety.com/p/what-are-deepfakes-ai-porn/">"What Are Deepfakes &amp; Why the Future of Porn is Terrifying"</a>. <i>Highsnobiety</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210714032914/https://www.highsnobiety.com/p/what-are-deepfakes-ai-porn/">Archived</a> from the original on 14 July 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 February</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Highsnobiety&amp;rft.atitle=What+Are+Deepfakes+%26+Why+the+Future+of+Porn+is+Terrifying&amp;rft.date=2018-02-20&amp;rft.aulast=Banks&amp;rft.aufirst=Alec&amp;rft_id=https%3A%2F%2Fwww.highsnobiety.com%2Fp%2Fwhat-are-deepfakes-ai-porn%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-9"><span class="mw-cite-backlink"><b><a href="#cite_ref-9">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFChristian" class="citation web cs1">Christian, Jon. <a rel="nofollow" class="external text" href="https://theoutline.com/post/3179/deepfake-videos-are-freaking-experts-out">"Experts fear face swapping tech could start an international showdown"</a>. <i>The Outline</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200116140157/https://theoutline.com/post/3179/deepfake-videos-are-freaking-experts-out">Archived</a> from the original on 16 January 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">28 February</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Outline&amp;rft.atitle=Experts+fear+face+swapping+tech+could+start+an+international+showdown&amp;rft.aulast=Christian&amp;rft.aufirst=Jon&amp;rft_id=https%3A%2F%2Ftheoutline.com%2Fpost%2F3179%2Fdeepfake-videos-are-freaking-experts-out&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-10"><span class="mw-cite-backlink"><b><a href="#cite_ref-10">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRoose2018" class="citation news cs1">Roose, Kevin (4 March 2018). <span class="id-lock-limited" title="Free access subject to limited trial, subscription normally required"><a rel="nofollow" class="external text" href="https://www.nytimes.com/2018/03/04/technology/fake-videos-deepfakes.html">"Here Come the Fake Videos, Too"</a></span>. <i>The New York Times</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0362-4331">0362-4331</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190618203019/https://www.nytimes.com/2018/03/04/technology/fake-videos-deepfakes.html">Archived</a> from the original on 18 June 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">24 March</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=Here+Come+the+Fake+Videos%2C+Too&amp;rft.date=2018-03-04&amp;rft.issn=0362-4331&amp;rft.aulast=Roose&amp;rft.aufirst=Kevin&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2018%2F03%2F04%2Ftechnology%2Ffake-videos-deepfakes.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-11"><span class="mw-cite-backlink"><b><a href="#cite_ref-11">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSchreyerSattarovReimerBorth2019" class="citation arxiv cs1">Schreyer, Marco; Sattarov, Timur; Reimer, Bernd; Borth, Damian (October 2019). "Adversarial Learning of Deepfakes in Accounting". <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1910.03810">1910.03810</a></span> [<a rel="nofollow" class="external text" href="https://arxiv.org/archive/cs.LG">cs.LG</a>].</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=preprint&amp;rft.jtitle=arXiv&amp;rft.atitle=Adversarial+Learning+of+Deepfakes+in+Accounting&amp;rft.date=2019-10&amp;rft_id=info%3Aarxiv%2F1910.03810&amp;rft.aulast=Schreyer&amp;rft.aufirst=Marco&amp;rft.au=Sattarov%2C+Timur&amp;rft.au=Reimer%2C+Bernd&amp;rft.au=Borth%2C+Damian&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-12"><span class="mw-cite-backlink"><b><a href="#cite_ref-12">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite 
id="CITEREFCaramancion2021" class="citation book cs1">Caramancion, Kevin Matthe (21 April 2021). <a rel="nofollow" class="external text" href="https://dx.doi.org/10.1109/iemtronics52119.2021.9422597">"The Demographic Profile Most at Risk of being Disinformed"</a>. <i>2021 IEEE International IOT, Electronics and Mechatronics Conference (IEMTRONICS)</i>. IEEE. pp.&#160;1–7. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2Fiemtronics52119.2021.9422597">10.1109/iemtronics52119.2021.9422597</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-6654-4067-7" title="Special:BookSources/978-1-6654-4067-7"><bdi>978-1-6654-4067-7</bdi></a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:234499888">234499888</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170119/https://ieeexplore.ieee.org/document/9422597/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=The+Demographic+Profile+Most+at+Risk+of+being+Disinformed&amp;rft.btitle=2021+IEEE+International+IOT%2C+Electronics+and+Mechatronics+Conference+%28IEMTRONICS%29&amp;rft.pages=1-7&amp;rft.pub=IEEE&amp;rft.date=2021-04-21&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A234499888%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1109%2Fiemtronics52119.2021.9422597&amp;rft.isbn=978-1-6654-4067-7&amp;rft.aulast=Caramancion&amp;rft.aufirst=Kevin+Matthe&amp;rft_id=http%3A%2F%2Fdx.doi.org%2F10.1109%2Fiemtronics52119.2021.9422597&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-13"><span class="mw-cite-backlink"><b><a href="#cite_ref-13">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLallaMitraniHarned" class="citation web cs1">Lalla, Vejay; Mitrani, Adine; Harned, Zach. <a rel="nofollow" class="external text" href="https://www.wipo.int/wipo_magazine/en/2022/02/article_0003.html">"Artificial Intelligence: Deepfakes in the Entertainment Industry"</a>. <i><a href="/wiki/World_Intellectual_Property_Organization" title="World Intellectual Property Organization">World Intellectual Property Organization</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20221108121107/https://www.wipo.int/wipo_magazine/en/2022/02/article_0003.html">Archived</a> from the original on 8 November 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=World+Intellectual+Property+Organization&amp;rft.atitle=Artificial+Intelligence%3A+Deepfakes+in+the+Entertainment+Industry&amp;rft.aulast=Lalla&amp;rft.aufirst=Vejay&amp;rft.au=Mitrani%2C+Adine&amp;rft.au=Harned%2C+Zach&amp;rft_id=https%3A%2F%2Fwww.wipo.int%2Fwipo_magazine%2Fen%2F2022%2F02%2Farticle_0003.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Harwell-2019-14"><span class="mw-cite-backlink">^ <a href="#cite_ref-Harwell-2019_14-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Harwell-2019_14-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Harwell-2019_14-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHarwell2019" class="citation news cs1">Harwell, Drew (12 June 2019). <span class="id-lock-subscription" title="Paid subscription required"><a rel="nofollow" class="external text" href="https://www.washingtonpost.com/technology/2019/06/12/top-ai-researchers-race-detect-deepfake-videos-we-are-outgunned/">"Top AI researchers race to detect 'deepfake' videos: 'We are outgunned'<span class="cs1-kern-right"></span>"</a></span>. <i>The Washington Post</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191031051258/https://www.washingtonpost.com/technology/2019/06/12/top-ai-researchers-race-detect-deepfake-videos-we-are-outgunned/">Archived</a> from the original on 31 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Washington+Post&amp;rft.atitle=Top+AI+researchers+race+to+detect+%27deepfake%27+videos%3A+%27We+are+outgunned%27&amp;rft.date=2019-06-12&amp;rft.aulast=Harwell&amp;rft.aufirst=Drew&amp;rft_id=https%3A%2F%2Fwww.washingtonpost.com%2Ftechnology%2F2019%2F06%2F12%2Ftop-ai-researchers-race-detect-deepfake-videos-we-are-outgunned%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-15"><span class="mw-cite-backlink"><b><a href="#cite_ref-15">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSanchez2018" class="citation web cs1">Sanchez, Julian (8 February 2018). <a rel="nofollow" class="external text" href="https://www.nbcnews.com/think/opinion/thanks-ai-future-fake-news-may-be-easily-faked-video-ncna845726">"Thanks to AI, the future of 'fake news' is being pioneered in homemade porn"</a>. <i>NBC News</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191109084341/https://www.nbcnews.com/think/opinion/thanks-ai-future-fake-news-may-be-easily-faked-video-ncna845726">Archived</a> from the original on 9 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=NBC+News&amp;rft.atitle=Thanks+to+AI%2C+the+future+of+%27fake+news%27+is+being+pioneered+in+homemade+porn&amp;rft.date=2018-02-08&amp;rft.aulast=Sanchez&amp;rft.aufirst=Julian&amp;rft_id=https%3A%2F%2Fwww.nbcnews.com%2Fthink%2Fopinion%2Fthanks-ai-future-fake-news-may-be-easily-faked-video-ncna845726&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Porter-2019-16"><span class="mw-cite-backlink">^ <a href="#cite_ref-Porter-2019_16-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Porter-2019_16-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Porter-2019_16-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-Porter-2019_16-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPorter2019" class="citation web cs1">Porter, Jon (2 September 2019). <a rel="nofollow" class="external text" href="https://www.theverge.com/2019/9/2/20844338/zao-deepfake-app-movie-tv-show-face-replace-privacy-policy-concerns">"Another convincing deepfake app goes viral prompting immediate privacy backlash"</a>. <i>The Verge</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190903202859/https://www.theverge.com/2019/9/2/20844338/zao-deepfake-app-movie-tv-show-face-replace-privacy-policy-concerns">Archived</a> from the original on 3 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Verge&amp;rft.atitle=Another+convincing+deepfake+app+goes+viral+prompting+immediate+privacy+backlash&amp;rft.date=2019-09-02&amp;rft.aulast=Porter&amp;rft.aufirst=Jon&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2019%2F9%2F2%2F20844338%2Fzao-deepfake-app-movie-tv-show-face-replace-privacy-policy-concerns&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-17"><span class="mw-cite-backlink"><b><a href="#cite_ref-17">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBodeLeesGolding2021" class="citation journal cs1">Bode, Lisa; Lees, Dominic; Golding, Dan (29 July 2021). <a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F13548565211034044">"The Digital Face and Deepfakes on Screen"</a>. <i><a href="/wiki/Convergence_(journal)" title="Convergence (journal)">Convergence: The International Journal of Research into New Media Technologies</a></i>. <b>27</b> (4): 849–854. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F13548565211034044">10.1177/13548565211034044</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1354-8565">1354-8565</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:237402465">237402465</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Convergence%3A+The+International+Journal+of+Research+into+New+Media+Technologies&amp;rft.atitle=The+Digital+Face+and+Deepfakes+on+Screen&amp;rft.volume=27&amp;rft.issue=4&amp;rft.pages=849-854&amp;rft.date=2021-07-29&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A237402465%23id-name%3DS2CID&amp;rft.issn=1354-8565&amp;rft_id=info%3Adoi%2F10.1177%2F13548565211034044&amp;rft.aulast=Bode&amp;rft.aufirst=Lisa&amp;rft.au=Lees%2C+Dominic&amp;rft.au=Golding%2C+Dan&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1177%252F13548565211034044&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Holliday-2021-18"><span class="mw-cite-backlink">^ <a href="#cite_ref-Holliday-2021_18-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Holliday-2021_18-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHolliday2021" class="citation journal cs1">Holliday, Christopher (26 July 2021). <a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F13548565211029412">"Rewriting the stars: Surface tensions and gender troubles in the online media production of digital deepfakes"</a>. <i>Convergence: The International Journal of Research into New Media Technologies</i>. <b>27</b> (4): 899–918. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F13548565211029412">10.1177/13548565211029412</a></span>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1354-8565">1354-8565</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:237402548">237402548</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Convergence%3A+The+International+Journal+of+Research+into+New+Media+Technologies&amp;rft.atitle=Rewriting+the+stars%3A+Surface+tensions+and+gender+troubles+in+the+online+media+production+of+digital+deepfakes&amp;rft.volume=27&amp;rft.issue=4&amp;rft.pages=899-918&amp;rft.date=2021-07-26&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A237402548%23id-name%3DS2CID&amp;rft.issn=1354-8565&amp;rft_id=info%3Adoi%2F10.1177%2F13548565211029412&amp;rft.aulast=Holliday&amp;rft.aufirst=Christopher&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1177%252F13548565211029412&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-19"><span class="mw-cite-backlink"><b><a href="#cite_ref-19">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGingrich2021" class="citation journal cs1">Gingrich, Oliver M. (5 July 2021). <a rel="nofollow" class="external text" href="https://doi.org/10.14236%2Fewic%2FEVA2021.25">"GENDER*UCK: Reframing gender &amp; media art"</a>. <i>Proceedings of EVA London 2021 (EVA 2021)</i>. Electronic Workshops in Computing. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.14236%2Fewic%2FEVA2021.25">10.14236/ewic/EVA2021.25</a></span>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:236918199">236918199</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Proceedings+of+EVA+London+2021+%28EVA+2021%29&amp;rft.atitle=GENDER%2AUCK%3A+Reframing+gender+%26+media+art&amp;rft.date=2021-07-05&amp;rft_id=info%3Adoi%2F10.14236%2Fewic%2FEVA2021.25&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A236918199%23id-name%3DS2CID&amp;rft.aulast=Gingrich&amp;rft.aufirst=Oliver+M.&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.14236%252Fewic%252FEVA2021.25&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-20"><span class="mw-cite-backlink"><b><a href="#cite_ref-20">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFletcher2018" class="citation journal cs1">Fletcher, John (2018). <a rel="nofollow" class="external text" href="https://muse.jhu.edu/article/715916">"Deepfakes, Artificial Intelligence, and Some Kind of Dystopia: The New Faces of Online Post-Fact Performance"</a>. <i>Theatre Journal</i>. <b>70</b> (4): 455–471. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1353%2Ftj.2018.0097">10.1353/tj.2018.0097</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1086-332X">1086-332X</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:191988083">191988083</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Theatre+Journal&amp;rft.atitle=Deepfakes%2C+Artificial+Intelligence%2C+and+Some+Kind+of+Dystopia%3A+The+New+Faces+of+Online+Post-Fact+Performance&amp;rft.volume=70&amp;rft.issue=4&amp;rft.pages=455-471&amp;rft.date=2018&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A191988083%23id-name%3DS2CID&amp;rft.issn=1086-332X&amp;rft_id=info%3Adoi%2F10.1353%2Ftj.2018.0097&amp;rft.aulast=Fletcher&amp;rft.aufirst=John&amp;rft_id=https%3A%2F%2Fmuse.jhu.edu%2Farticle%2F715916&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-21"><span class="mw-cite-backlink"><b><a href="#cite_ref-21">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFÖhman2020" class="citation journal cs1">Öhman, Carl (1 June 2020). <a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10676-019-09522-1">"Introducing the pervert's dilemma: a contribution to the critique of Deepfake Pornography"</a>. <i>Ethics and Information Technology</i>. <b>22</b> (2): 133–140. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs10676-019-09522-1">10.1007/s10676-019-09522-1</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1572-8439">1572-8439</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:208145457">208145457</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Ethics+and+Information+Technology&amp;rft.atitle=Introducing+the+pervert%27s+dilemma%3A+a+contribution+to+the+critique+of+Deepfake+Pornography&amp;rft.volume=22&amp;rft.issue=2&amp;rft.pages=133-140&amp;rft.date=2020-06-01&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A208145457%23id-name%3DS2CID&amp;rft.issn=1572-8439&amp;rft_id=info%3Adoi%2F10.1007%2Fs10676-019-09522-1&amp;rft.aulast=%C3%96hman&amp;rft.aufirst=Carl&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1007%252Fs10676-019-09522-1&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-22"><span class="mw-cite-backlink"><b><a href="#cite_ref-22">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFvan_der_Nagel2020" class="citation journal cs1">van der Nagel, Emily (1 October 2020). <a rel="nofollow" class="external text" href="https://www.tandfonline.com/doi/full/10.1080/23268743.2020.1741434">"Verifying images: deepfakes, control, and consent"</a>. <i>Porn Studies</i>. <b>7</b> (4): 424–429. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F23268743.2020.1741434">10.1080/23268743.2020.1741434</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2326-8743">2326-8743</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:242891792">242891792</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171621/https://www.tandfonline.com/doi/full/10.1080/23268743.2020.1741434">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Porn+Studies&amp;rft.atitle=Verifying+images%3A+deepfakes%2C+control%2C+and+consent&amp;rft.volume=7&amp;rft.issue=4&amp;rft.pages=424-429&amp;rft.date=2020-10-01&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A242891792%23id-name%3DS2CID&amp;rft.issn=2326-8743&amp;rft_id=info%3Adoi%2F10.1080%2F23268743.2020.1741434&amp;rft.aulast=van+der+Nagel&amp;rft.aufirst=Emily&amp;rft_id=https%3A%2F%2Fwww.tandfonline.com%2Fdoi%2Ffull%2F10.1080%2F23268743.2020.1741434&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-23"><span class="mw-cite-backlink"><b><a href="#cite_ref-23">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFallis2021" class="citation journal cs1">Fallis, Don (1 December 2021). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7406872">"The Epistemic Threat of Deepfakes"</a>. <i>Philosophy &amp; Technology</i>. <b>34</b> (4): 623–643. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs13347-020-00419-2">10.1007/s13347-020-00419-2</a>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2210-5433">2210-5433</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7406872">7406872</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/32837868">32837868</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Philosophy+%26+Technology&amp;rft.atitle=The+Epistemic+Threat+of+Deepfakes&amp;rft.volume=34&amp;rft.issue=4&amp;rft.pages=623-643&amp;rft.date=2021-12-01&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7406872%23id-name%3DPMC&amp;rft.issn=2210-5433&amp;rft_id=info%3Apmid%2F32837868&amp;rft_id=info%3Adoi%2F10.1007%2Fs13347-020-00419-2&amp;rft.aulast=Fallis&amp;rft.aufirst=Don&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC7406872&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-24"><span class="mw-cite-backlink"><b><a href="#cite_ref-24">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFChesneyCitron2018" class="citation journal cs1">Chesney, Robert; Citron, Danielle Keats (2018). <a rel="nofollow" class="external text" href="https://www.ssrn.com/abstract=3213954">"Deep Fakes: A Looming Challenge for Privacy, Democracy, and National Security"</a>. <i>SSRN Electronic Journal</i>. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.2139%2Fssrn.3213954">10.2139/ssrn.3213954</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1556-5068">1556-5068</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191221152854/https://www.ssrn.com/abstract=3213954">Archived</a> from the original on 21 December 2019<span class="reference-accessdate">. Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=SSRN+Electronic+Journal&amp;rft.atitle=Deep+Fakes%3A+A+Looming+Challenge+for+Privacy%2C+Democracy%2C+and+National+Security&amp;rft.date=2018&amp;rft_id=info%3Adoi%2F10.2139%2Fssrn.3213954&amp;rft.issn=1556-5068&amp;rft.aulast=Chesney&amp;rft.aufirst=Robert&amp;rft.au=Citron%2C+Danielle+Keats&amp;rft_id=https%3A%2F%2Fwww.ssrn.com%2Fabstract%3D3213954&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-25"><span class="mw-cite-backlink"><b><a href="#cite_ref-25">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFYadlin-SegalOppenheim2021" class="citation journal cs1">Yadlin-Segal, Aya; Oppenheim, Yael (February 2021). <a rel="nofollow" class="external text" href="http://journals.sagepub.com/doi/10.1177/1354856520923963">"Whose dystopia is it anyway? Deepfakes and social media regulation"</a>. <i>Convergence: The International Journal of Research into New Media Technologies</i>. <b>27</b> (1): 36–51. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F1354856520923963">10.1177/1354856520923963</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1354-8565">1354-8565</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:219438536">219438536</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220209174153/https://journals.sagepub.com/doi/10.1177/1354856520923963">Archived</a> from the original on 9 February 2022<span class="reference-accessdate">. Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Convergence%3A+The+International+Journal+of+Research+into+New+Media+Technologies&amp;rft.atitle=Whose+dystopia+is+it+anyway%3F+Deepfakes+and+social+media+regulation&amp;rft.volume=27&amp;rft.issue=1&amp;rft.pages=36-51&amp;rft.date=2021-02&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A219438536%23id-name%3DS2CID&amp;rft.issn=1354-8565&amp;rft_id=info%3Adoi%2F10.1177%2F1354856520923963&amp;rft.aulast=Yadlin-Segal&amp;rft.aufirst=Aya&amp;rft.au=Oppenheim%2C+Yael&amp;rft_id=http%3A%2F%2Fjournals.sagepub.com%2Fdoi%2F10.1177%2F1354856520923963&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-26"><span class="mw-cite-backlink"><b><a href="#cite_ref-26">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHwangRyuJeong2021" class="citation journal cs1">Hwang, Yoori; Ryu, Ji Youn; Jeong, 
Se-Hoon (1 March 2021). <a rel="nofollow" class="external text" href="https://www.liebertpub.com/doi/10.1089/cyber.2020.0174">"Effects of Disinformation Using Deepfake: The Protective Effect of Media Literacy Education"</a>. <i>Cyberpsychology, Behavior, and Social Networking</i>. <b>24</b> (3): 188–193. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1089%2Fcyber.2020.0174">10.1089/cyber.2020.0174</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2152-2715">2152-2715</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/33646021">33646021</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:232078561">232078561</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170100/https://www.liebertpub.com/doi/10.1089/cyber.2020.0174">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Cyberpsychology%2C+Behavior%2C+and+Social+Networking&amp;rft.atitle=Effects+of+Disinformation+Using+Deepfake%3A+The+Protective+Effect+of+Media+Literacy+Education&amp;rft.volume=24&amp;rft.issue=3&amp;rft.pages=188-193&amp;rft.date=2021-03-01&amp;rft.issn=2152-2715&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A232078561%23id-name%3DS2CID&amp;rft_id=info%3Apmid%2F33646021&amp;rft_id=info%3Adoi%2F10.1089%2Fcyber.2020.0174&amp;rft.aulast=Hwang&amp;rft.aufirst=Yoori&amp;rft.au=Ryu%2C+Ji+Youn&amp;rft.au=Jeong%2C+Se-Hoon&amp;rft_id=https%3A%2F%2Fwww.liebertpub.com%2Fdoi%2F10.1089%2Fcyber.2020.0174&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-27"><span class="mw-cite-backlink"><b><a href="#cite_ref-27">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHight2021" class="citation journal cs1">Hight, Craig (12 November 2021). <a rel="nofollow" class="external text" href="https://www.tandfonline.com/doi/full/10.1080/10304312.2021.2003756">"Deepfakes and documentary practice in an age of misinformation"</a>. <i>Continuum</i>. <b>36</b> (3): 393–410. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F10304312.2021.2003756">10.1080/10304312.2021.2003756</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1030-4312">1030-4312</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:244092288">244092288</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220209164352/https://www.tandfonline.com/doi/full/10.1080/10304312.2021.2003756">Archived</a> from the original on 9 February 2022<span class="reference-accessdate">. Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Continuum&amp;rft.atitle=Deepfakes+and+documentary+practice+in+an+age+of+misinformation&amp;rft.volume=36&amp;rft.issue=3&amp;rft.pages=393-410&amp;rft.date=2021-11-12&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A244092288%23id-name%3DS2CID&amp;rft.issn=1030-4312&amp;rft_id=info%3Adoi%2F10.1080%2F10304312.2021.2003756&amp;rft.aulast=Hight&amp;rft.aufirst=Craig&amp;rft_id=https%3A%2F%2Fwww.tandfonline.com%2Fdoi%2Ffull%2F10.1080%2F10304312.2021.2003756&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-28"><span class="mw-cite-backlink"><b><a href="#cite_ref-28">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHancockBailenson2021" class="citation journal cs1">Hancock, Jeffrey T.; Bailenson, Jeremy N. (1 March 2021). <a rel="nofollow" class="external text" href="https://www.liebertpub.com/doi/10.1089/cyber.2021.29208.jth">"The Social Impact of Deepfakes"</a>. <i>Cyberpsychology, Behavior, and Social Networking</i>. <b>24</b> (3): 149–152. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1089%2Fcyber.2021.29208.jth">10.1089/cyber.2021.29208.jth</a>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2152-2715">2152-2715</a>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/33760669">33760669</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:232356146">232356146</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170102/https://www.liebertpub.com/doi/10.1089/cyber.2021.29208.jth">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Cyberpsychology%2C+Behavior%2C+and+Social+Networking&amp;rft.atitle=The+Social+Impact+of+Deepfakes&amp;rft.volume=24&amp;rft.issue=3&amp;rft.pages=149-152&amp;rft.date=2021-03-01&amp;rft.issn=2152-2715&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A232356146%23id-name%3DS2CID&amp;rft_id=info%3Apmid%2F33760669&amp;rft_id=info%3Adoi%2F10.1089%2Fcyber.2021.29208.jth&amp;rft.aulast=Hancock&amp;rft.aufirst=Jeffrey+T.&amp;rft.au=Bailenson%2C+Jeremy+N.&amp;rft_id=https%3A%2F%2Fwww.liebertpub.com%2Fdoi%2F10.1089%2Fcyber.2021.29208.jth&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-29"><span class="mw-cite-backlink"><b><a href="#cite_ref-29">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFde_Seta2021" class="citation journal cs1">de Seta, Gabriele (30 July 2021). 
<a rel="nofollow" class="external text" href="http://journals.sagepub.com/doi/10.1177/13548565211030185">"Huanlian, or changing faces: Deepfakes on Chinese digital media platforms"</a>. <i>Convergence: The International Journal of Research into New Media Technologies</i>. <b>27</b> (4): 935–953. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F13548565211030185">10.1177/13548565211030185</a>. <a href="/wiki/Hdl_(identifier)" class="mw-redirect" title="Hdl (identifier)">hdl</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://hdl.handle.net/11250%2F2833613">11250/2833613</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1354-8565">1354-8565</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:237402447">237402447</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171616/https://journals.sagepub.com/doi/10.1177/13548565211030185">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 February</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Convergence%3A+The+International+Journal+of+Research+into+New+Media+Technologies&amp;rft.atitle=Huanlian%2C+or+changing+faces%3A+Deepfakes+on+Chinese+digital+media+platforms&amp;rft.volume=27&amp;rft.issue=4&amp;rft.pages=935-953&amp;rft.date=2021-07-30&amp;rft_id=info%3Ahdl%2F11250%2F2833613&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A237402447%23id-name%3DS2CID&amp;rft.issn=1354-8565&amp;rft_id=info%3Adoi%2F10.1177%2F13548565211030185&amp;rft.aulast=de+Seta&amp;rft.aufirst=Gabriele&amp;rft_id=http%3A%2F%2Fjournals.sagepub.com%2Fdoi%2F10.1177%2F13548565211030185&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Bregler-1997-30"><span class="mw-cite-backlink">^ <a href="#cite_ref-Bregler-1997_30-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Bregler-1997_30-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBreglerCovellSlaney1997" class="citation book cs1">Bregler, Christoph; Covell, Michele; Slaney, Malcolm (1997). <a rel="nofollow" class="external text" href="https://dl.acm.org/doi/10.1145/258734.258880">"Video Rewrite: Driving visual speech with audio"</a>. <i>Proceedings of the 24th annual conference on Computer graphics and interactive techniques – SIGGRAPH '97</i>. Vol.&#160;24. pp.&#160;353–360. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F258734.258880">10.1145/258734.258880</a>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/0897918967" title="Special:BookSources/0897918967"><bdi>0897918967</bdi></a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:2341707">2341707</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171622/https://dl.acm.org/doi/10.1145/258734.258880">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">10 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Video+Rewrite%3A+Driving+visual+speech+with+audio&amp;rft.btitle=Proceedings+of+the+24th+annual+conference+on+Computer+graphics+and+interactive+techniques+%E2%80%93+SIGGRAPH+%2797&amp;rft.pages=353-360&amp;rft.date=1997&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A2341707%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1145%2F258734.258880&amp;rft.isbn=0897918967&amp;rft.aulast=Bregler&amp;rft.aufirst=Christoph&amp;rft.au=Covell%2C+Michele&amp;rft.au=Slaney%2C+Malcolm&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fdoi%2F10.1145%2F258734.258880&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Suwajanakorn-2017-31"><span class="mw-cite-backlink">^ <a href="#cite_ref-Suwajanakorn-2017_31-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Suwajanakorn-2017_31-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Suwajanakorn-2017_31-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSuwajanakornSeitzKemelmacher-Shlizerman2017" class="citation journal cs1">Suwajanakorn, Supasorn; Seitz, 
Steven M.; Kemelmacher-Shlizerman, Ira (July 2017). <a rel="nofollow" class="external text" href="https://dl.acm.org/doi/10.1145/3072959.3073640">"Synthesizing Obama: Learning Lip Sync from Audio"</a>. <i>ACM Trans. Graph</i>. <b>36</b> (4): 95:1–95:13. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3072959.3073640">10.1145/3072959.3073640</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:207586187">207586187</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200519100353/https://dl.acm.org/doi/10.1145/3072959.3073640">Archived</a> from the original on 19 May 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">10 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=ACM+Trans.+Graph.&amp;rft.atitle=Synthesizing+Obama%3A+Learning+Lip+Sync+from+Audio&amp;rft.volume=36&amp;rft.issue=4&amp;rft.pages=95%3A1-95%3A13&amp;rft.date=2017-07&amp;rft_id=info%3Adoi%2F10.1145%2F3072959.3073640&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A207586187%23id-name%3DS2CID&amp;rft.aulast=Suwajanakorn&amp;rft.aufirst=Supasorn&amp;rft.au=Seitz%2C+Steven+M.&amp;rft.au=Kemelmacher-Shlizerman%2C+Ira&amp;rft_id=https%3A%2F%2Fdl.acm.org%2Fdoi%2F10.1145%2F3072959.3073640&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Thies-2016-32"><span class="mw-cite-backlink">^ <a href="#cite_ref-Thies-2016_32-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Thies-2016_32-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Thies-2016_32-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link 
rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFThiesZollhöferStammingerTheobalt2016" class="citation book cs1">Thies, Justus; Zollhöfer, Michael; Stamminger, Marc; Theobalt, Christian; Nießner, Matthias (June 2016). "Face2Face: Real-Time Face Capture and Reenactment of RGB Videos". <i>2016 IEEE Conference on Computer Vision and Pattern Recognition (CVPR)</i>. IEEE. pp.&#160;2387–2395. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2007.14808">2007.14808</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FCVPR.2016.262">10.1109/CVPR.2016.262</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9781467388511" title="Special:BookSources/9781467388511"><bdi>9781467388511</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:206593693">206593693</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Face2Face%3A+Real-Time+Face+Capture+and+Reenactment+of+RGB+Videos&amp;rft.btitle=2016+IEEE+Conference+on+Computer+Vision+and+Pattern+Recognition+%28CVPR%29&amp;rft.pages=2387-2395&amp;rft.pub=IEEE&amp;rft.date=2016-06&amp;rft_id=info%3Aarxiv%2F2007.14808&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A206593693%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1109%2FCVPR.2016.262&amp;rft.isbn=9781467388511&amp;rft.aulast=Thies&amp;rft.aufirst=Justus&amp;rft.au=Zollh%C3%B6fer%2C+Michael&amp;rft.au=Stamminger%2C+Marc&amp;rft.au=Theobalt%2C+Christian&amp;rft.au=Nie%C3%9Fner%2C+Matthias&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-The_Verge-2019-33"><span class="mw-cite-backlink"><b><a href="#cite_ref-The_Verge-2019_33-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.theverge.com/2018/8/26/17778792/deepfakes-video-dancing-ai-synthesis">"Deepfakes for dancing: you can now use AI to fake those dance moves you always wanted"</a>. <i>The Verge</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190517055341/https://www.theverge.com/2018/8/26/17778792/deepfakes-video-dancing-ai-synthesis">Archived</a> from the original on 17 May 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">27 August</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Verge&amp;rft.atitle=Deepfakes+for+dancing%3A+you+can+now+use+AI+to+fake+those+dance+moves+you+always+wanted&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2018%2F8%2F26%2F17778792%2Fdeepfakes-video-dancing-ai-synthesis&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Farquhar-2018-34"><span class="mw-cite-backlink"><b><a href="#cite_ref-Farquhar-2018_34-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFarquhar2018" class="citation news cs1">Farquhar, Peter (27 August 2018). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190410050633/https://www.businessinsider.com.au/artificial-intelligence-ai-deepfake-dancing-2018-8">"An AI program will soon be here to help your deepface dancing – just don't call it deepfake"</a>. <i>Business Insider Australia</i>. Archived from <a rel="nofollow" class="external text" href="https://www.businessinsider.com.au/artificial-intelligence-ai-deepfake-dancing-2018-8">the original</a> on 10 April 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">27 August</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Business+Insider+Australia&amp;rft.atitle=An+AI+program+will+soon+be+here+to+help+your+deepface+dancing+%E2%80%93+just+don%27t+call+it+deepfake&amp;rft.date=2018-08-27&amp;rft.aulast=Farquhar&amp;rft.aufirst=Peter&amp;rft_id=https%3A%2F%2Fwww.businessinsider.com.au%2Fartificial-intelligence-ai-deepfake-dancing-2018-8&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-35"><span class="mw-cite-backlink"><b><a href="#cite_ref-35">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMirskyMahlerShelefElovici2019" class="citation book cs1">Mirsky, Yisroel; Mahler, Tom; Shelef, Ilan; Elovici, Yuval (2019). <a rel="nofollow" class="external text" href="https://www.usenix.org/conference/usenixsecurity19/presentation/mirsky"><i>CT-GAN: Malicious Tampering of 3D Medical Imagery using Deep Learning</i></a>. pp.&#160;461–478. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1901.03597">1901.03597</a></span>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-939133-06-9" title="Special:BookSources/978-1-939133-06-9"><bdi>978-1-939133-06-9</bdi></a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200620075305/https://www.usenix.org/conference/usenixsecurity19/presentation/mirsky">Archived</a> from the original on 20 June 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 June</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=CT-GAN%3A+Malicious+Tampering+of+3D+Medical+Imagery+using+Deep+Learning&amp;rft.pages=461-478&amp;rft.date=2019&amp;rft_id=info%3Aarxiv%2F1901.03597&amp;rft.isbn=978-1-939133-06-9&amp;rft.aulast=Mirsky&amp;rft.aufirst=Yisroel&amp;rft.au=Mahler%2C+Tom&amp;rft.au=Shelef%2C+Ilan&amp;rft.au=Elovici%2C+Yuval&amp;rft_id=https%3A%2F%2Fwww.usenix.org%2Fconference%2Fusenixsecurity19%2Fpresentation%2Fmirsky&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-36"><span class="mw-cite-backlink"><b><a href="#cite_ref-36">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFO&#39;Neill2019" class="citation web cs1">O'Neill, Patrick Howell (3 April 2019). <a rel="nofollow" class="external text" href="https://gizmodo.com/researchers-demonstrate-malware-that-can-trick-doctors-1833786672">"Researchers Demonstrate Malware That Can Trick Doctors Into Misdiagnosing Cancer"</a>. <i>Gizmodo</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172118/https://gizmodo.com/researchers-demonstrate-malware-that-can-trick-doctors-1833786672">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 June</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Gizmodo&amp;rft.atitle=Researchers+Demonstrate+Malware+That+Can+Trick+Doctors+Into+Misdiagnosing+Cancer&amp;rft.date=2019-04-03&amp;rft.aulast=O%27Neill&amp;rft.aufirst=Patrick+Howell&amp;rft_id=https%3A%2F%2Fgizmodo.com%2Fresearchers-demonstrate-malware-that-can-trick-doctors-1833786672&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-37"><span class="mw-cite-backlink"><b><a href="#cite_ref-37">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMirskyLee2020" class="citation journal cs1">Mirsky, Yisroel; Lee, Wenke (12 May 2020). "The Creation and Detection of Deepfakes: A Survey". <i>ACM Computing Surveys</i>. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2004.11138">2004.11138</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3425780">10.1145/3425780</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:216080410">216080410</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=ACM+Computing+Surveys&amp;rft.atitle=The+Creation+and+Detection+of+Deepfakes%3A+A+Survey&amp;rft.date=2020-05-12&amp;rft_id=info%3Aarxiv%2F2004.11138&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A216080410%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1145%2F3425780&amp;rft.aulast=Mirsky&amp;rft.aufirst=Yisroel&amp;rft.au=Lee%2C+Wenke&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-38"><span class="mw-cite-backlink"><b><a href="#cite_ref-38">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKarnouskos2020" class="citation journal cs1">Karnouskos, Stamatis (2020). <a rel="nofollow" class="external text" href="https://papers.duckdns.org/files/2020_Deepfakes.pdf">"Artificial Intelligence in Digital Media: The Era of Deepfakes"</a> <span class="cs1-format">(PDF)</span>. <i>IEEE Transactions on Technology and Society</i>. <b>1</b> (3): 1. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1109%2FTTS.2020.3001312">10.1109/TTS.2020.3001312</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:221716206">221716206</a>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20210714032923/https://papers.duckdns.org/files/2020_Deepfakes.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 14 July 2021<span class="reference-accessdate">. Retrieved <span class="nowrap">9 July</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=IEEE+Transactions+on+Technology+and+Society&amp;rft.atitle=Artificial+Intelligence+in+Digital+Media%3A+The+Era+of+Deepfakes&amp;rft.volume=1&amp;rft.issue=3&amp;rft.pages=1&amp;rft.date=2020&amp;rft_id=info%3Adoi%2F10.1109%2FTTS.2020.3001312&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A221716206%23id-name%3DS2CID&amp;rft.aulast=Karnouskos&amp;rft.aufirst=Stamatis&amp;rft_id=https%3A%2F%2Fpapers.duckdns.org%2Ffiles%2F2020_Deepfakes.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2018b-39"><span class="mw-cite-backlink">^ <a href="#cite_ref-Cole-2018b_39-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Cole-2018b_39-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2018" class="citation web cs1">Cole, Samantha (24 January 2018). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/bjye8a/reddit-fake-porn-app-daisy-ridley">"We Are Truly Fucked: Everyone Is Making AI-Generated Fake Porn Now"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190907194524/https://www.vice.com/en_us/article/bjye8a/reddit-fake-porn-app-daisy-ridley">Archived</a> from the original on 7 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 May</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=We+Are+Truly+Fucked%3A+Everyone+Is+Making+AI-Generated+Fake+Porn+Now&amp;rft.date=2018-01-24&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fbjye8a%2Freddit-fake-porn-app-daisy-ridley&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-40"><span class="mw-cite-backlink"><b><a href="#cite_ref-40">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHaysom2018" class="citation web cs1">Haysom, Sam (31 January 2018). <a rel="nofollow" class="external text" href="https://mashable.com/2018/01/31/nicolas-cage-face-swapping-deepfakes/">"People Are Using Face-Swapping Tech to Add Nicolas Cage to Random Movies and What Is 2018"</a>. <i>Mashable</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190724221500/https://mashable.com/2018/01/31/nicolas-cage-face-swapping-deepfakes/">Archived</a> from the original on 24 July 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 April</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Mashable&amp;rft.atitle=People+Are+Using+Face-Swapping+Tech+to+Add+Nicolas+Cage+to+Random+Movies+and+What+Is+2018&amp;rft.date=2018-01-31&amp;rft.aulast=Haysom&amp;rft.aufirst=Sam&amp;rft_id=https%3A%2F%2Fmashable.com%2F2018%2F01%2F31%2Fnicolas-cage-face-swapping-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-41"><span class="mw-cite-backlink"><b><a href="#cite_ref-41">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.reddit.com/r/SFWdeepfakes/">"r/SFWdeepfakes"</a>. <i>Reddit</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190809091757/https://www.reddit.com/r/SFWdeepfakes">Archived</a> from the original on 9 August 2019<span class="reference-accessdate">. Retrieved <span class="nowrap">12 December</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Reddit&amp;rft.atitle=r%2FSFWdeepfakes&amp;rft_id=https%3A%2F%2Fwww.reddit.com%2Fr%2FSFWdeepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Hathaway-2018-42"><span class="mw-cite-backlink"><b><a href="#cite_ref-Hathaway-2018_42-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHathaway2018" class="citation web cs1">Hathaway, Jay (8 February 2018). 
<a rel="nofollow" class="external text" href="https://www.dailydot.com/unclick/deepfake-sites-reddit-ban/">"Here's where 'deepfakes,' the new fake celebrity porn, went after the Reddit ban"</a>. <i>The Daily Dot</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190706092234/https://www.dailydot.com/unclick/deepfake-sites-reddit-ban/">Archived</a> from the original on 6 July 2019<span class="reference-accessdate">. Retrieved <span class="nowrap">22 December</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Daily+Dot&amp;rft.atitle=Here%27s+where+%27deepfakes%2C%27+the+new+fake+celebrity+porn%2C+went+after+the+Reddit+ban&amp;rft.date=2018-02-08&amp;rft.aulast=Hathaway&amp;rft.aufirst=Jay&amp;rft_id=https%3A%2F%2Fwww.dailydot.com%2Funclick%2Fdeepfake-sites-reddit-ban%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-43"><span class="mw-cite-backlink"><b><a href="#cite_ref-43">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.online-tech-tips.com/computer-tips/what-is-a-deepfake-and-how-are-they-made/">"What is a Deepfake and How Are They Made?"</a>. <i>Online Tech Tips</i>. 23 May 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191108161241/https://www.online-tech-tips.com/computer-tips/what-is-a-deepfake-and-how-are-they-made/">Archived</a> from the original on 8 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Online+Tech+Tips&amp;rft.atitle=What+is+a+Deepfake+and+How+Are+They+Made%3F&amp;rft.date=2019-05-23&amp;rft_id=https%3A%2F%2Fwww.online-tech-tips.com%2Fcomputer-tips%2Fwhat-is-a-deepfake-and-how-are-they-made%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-44"><span class="mw-cite-backlink"><b><a href="#cite_ref-44">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRobertson2018" class="citation web cs1">Robertson, Adi (11 February 2018). <a rel="nofollow" class="external text" href="https://www.theverge.com/2018/2/11/16992986/fakeapp-deepfakes-ai-face-swapping">"I'm using AI to face-swap Elon Musk and Jeff Bezos, and I'm really bad at it"</a>. <i>The Verge</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20180324223908/https://www.theverge.com/2018/2/11/16992986/fakeapp-deepfakes-ai-face-swapping">Archived</a> from the original on 24 March 2018<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Verge&amp;rft.atitle=I%27m+using+AI+to+face-swap+Elon+Musk+and+Jeff+Bezos%2C+and+I%27m+really+bad+at+it&amp;rft.date=2018-02-11&amp;rft.aulast=Robertson&amp;rft.aufirst=Adi&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2018%2F2%2F11%2F16992986%2Ffakeapp-deepfakes-ai-face-swapping&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-45"><span class="mw-cite-backlink"><b><a href="#cite_ref-45">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://deepfakesweb.com/">"Deepfakes web | The best online faceswap app"</a>. <i>Deepfakes web</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210714032902/https://deepfakesweb.com/">Archived</a> from the original on 14 July 2021<span class="reference-accessdate">. Retrieved <span class="nowrap">21 February</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Deepfakes+web&amp;rft.atitle=Deepfakes+web+%7C+The+best+online+faceswap+app&amp;rft_id=https%3A%2F%2Fdeepfakesweb.com%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Faceswap-2019-46"><span class="mw-cite-backlink"><b><a href="#cite_ref-Faceswap-2019_46-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://faceswap.dev">"Faceswap is the leading free and Open Source multi-platform Deepfakes software"</a>. 15 October 2019. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20210531200049/https://faceswap.dev/">Archived</a> from the original on 31 May 2021<span class="reference-accessdate">. Retrieved <span class="nowrap">14 July</span> 2021</span> &#8211; via WordPress.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Faceswap+is+the+leading+free+and+Open+Source+multi-platform+Deepfakes+software.&amp;rft.date=2019-10-15&amp;rft_id=https%3A%2F%2Ffaceswap.dev&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Github-2022-47"><span class="mw-cite-backlink"><b><a href="#cite_ref-Github-2022_47-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://github.com/iperov/DeepFaceLab">"DeepFaceLab is a tool that utilizes machine learning to replace faces in videos. Includes prebuilt ready to work standalone Windows 7,8,10 binary (look readme.md).: iperov/DeepFaceLab"</a>. 19 June 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190509223348/https://github.com/iperov/DeepFaceLab">Archived</a> from the original on 9 May 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">6 March</span> 2019</span> &#8211; via GitHub.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=DeepFaceLab+is+a+tool+that+utilizes+machine+learning+to+replace+faces+in+videos.+Includes+prebuilt+ready+to+work+standalone+Windows+7%2C8%2C10+binary+%28look+readme.md%29.%3A+iperov%2FDeepFaceLab&amp;rft.date=2019-06-19&amp;rft_id=https%3A%2F%2Fgithub.com%2Fiperov%2FDeepFaceLab&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-48"><span class="mw-cite-backlink"><b><a href="#cite_ref-48">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFChandler" class="citation web cs1">Chandler, Simon. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/simonchandler/2020/03/09/why-deepfakes-are-a-net-positive-for-humanity/">"Why Deepfakes Are A Net Positive For Humanity"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201116050152/https://www.forbes.com/sites/simonchandler/2020/03/09/why-deepfakes-are-a-net-positive-for-humanity/">Archived</a> from the original on 16 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 November</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=Why+Deepfakes+Are+A+Net+Positive+For+Humanity&amp;rft.aulast=Chandler&amp;rft.aufirst=Simon&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fsimonchandler%2F2020%2F03%2F09%2Fwhy-deepfakes-are-a-net-positive-for-humanity%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-49"><span class="mw-cite-backlink"><b><a href="#cite_ref-49">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPangburn2019" class="citation web cs1">Pangburn, D. J. (21 September 2019). <a rel="nofollow" class="external text" href="https://www.fastcompany.com/90407145/youve-been-warned-full-body-deepfakes-are-the-next-step-in-ai-based-human-mimicry">"You've been warned: Full body deepfakes are the next step in AI-based human mimicry"</a>. <i>Fast Company</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191108161240/https://www.fastcompany.com/90407145/youve-been-warned-full-body-deepfakes-are-the-next-step-in-ai-based-human-mimicry">Archived</a> from the original on 8 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Fast+Company&amp;rft.atitle=You%27ve+been+warned%3A+Full+body+deepfakes+are+the+next+step+in+AI-based+human+mimicry&amp;rft.date=2019-09-21&amp;rft.aulast=Pangburn&amp;rft.aufirst=D.+J.&amp;rft_id=https%3A%2F%2Fwww.fastcompany.com%2F90407145%2Fyouve-been-warned-full-body-deepfakes-are-the-next-step-in-ai-based-human-mimicry&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-50"><span class="mw-cite-backlink"><b><a href="#cite_ref-50">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLyons2020" class="citation web cs1">Lyons, Kim (29 January 2020). <a rel="nofollow" class="external text" href="https://www.theverge.com/2020/1/29/21080553/ftc-deepfakes-audio-cloning-joe-rogan-phone-scams">"FTC says the tech behind audio deepfakes is getting better"</a>. <i>The Verge</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200130141130/https://www.theverge.com/2020/1/29/21080553/ftc-deepfakes-audio-cloning-joe-rogan-phone-scams">Archived</a> from the original on 30 January 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Verge&amp;rft.atitle=FTC+says+the+tech+behind+audio+deepfakes+is+getting+better&amp;rft.date=2020-01-29&amp;rft.aulast=Lyons&amp;rft.aufirst=Kim&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2020%2F1%2F29%2F21080553%2Fftc-deepfakes-audio-cloning-joe-rogan-phone-scams&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-51"><span class="mw-cite-backlink"><b><a href="#cite_ref-51">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://google.github.io/tacotron/publications/speaker_adaptation/">"Audio samples from "Transfer Learning from Speaker Verification to Multispeaker Text-To-Speech Synthesis"<span class="cs1-kern-right"></span>"</a>. <i>google.github.io</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191114031835/https://google.github.io/tacotron/publications/speaker_adaptation/">Archived</a> from the original on 14 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=google.github.io&amp;rft.atitle=Audio+samples+from+%22Transfer+Learning+from+Speaker+Verification+to+Multispeaker+Text-To-Speech+Synthesis%22&amp;rft_id=https%3A%2F%2Fgoogle.github.io%2Ftacotron%2Fpublications%2Fspeaker_adaptation%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-52"><span class="mw-cite-backlink"><b><a href="#cite_ref-52">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJiaZhangWeissWang2019" class="citation arxiv cs1">Jia, Ye; Zhang, Yu; Weiss, Ron J.; Wang, Quan; Shen, Jonathan; Ren, Fei; Chen, Zhifeng; Nguyen, Patrick; Pang, Ruoming; Moreno, Ignacio Lopez; Wu, Yonghui (2 January 2019). "Transfer Learning from Speaker Verification to Multispeaker Text-To-Speech Synthesis". 
<a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1806.04558">1806.04558</a></span> [<a rel="nofollow" class="external text" href="https://arxiv.org/archive/cs.CL">cs.CL</a>].</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=preprint&amp;rft.jtitle=arXiv&amp;rft.atitle=Transfer+Learning+from+Speaker+Verification+to+Multispeaker+Text-To-Speech+Synthesis&amp;rft.date=2019-01-02&amp;rft_id=info%3Aarxiv%2F1806.04558&amp;rft.aulast=Jia&amp;rft.aufirst=Ye&amp;rft.au=Zhang%2C+Yu&amp;rft.au=Weiss%2C+Ron+J.&amp;rft.au=Wang%2C+Quan&amp;rft.au=Shen%2C+Jonathan&amp;rft.au=Ren%2C+Fei&amp;rft.au=Chen%2C+Zhifeng&amp;rft.au=Nguyen%2C+Patrick&amp;rft.au=Pang%2C+Ruoming&amp;rft.au=Moreno%2C+Ignacio+Lopez&amp;rft.au=Wu%2C+Yonghui&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-53"><span class="mw-cite-backlink"><b><a href="#cite_ref-53">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="http://www.niessnerlab.org/projects/roessler2019faceforensicspp.html">"TUM Visual Computing: Prof. Matthias Nießner"</a>. <i>www.niessnerlab.org</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200221164942/http://niessnerlab.org/projects/roessler2019faceforensicspp.html">Archived</a> from the original on 21 February 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=www.niessnerlab.org&amp;rft.atitle=TUM+Visual+Computing%3A+Prof.+Matthias+Nie%C3%9Fner&amp;rft_id=http%3A%2F%2Fwww.niessnerlab.org%2Fprojects%2Froessler2019faceforensicspp.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-54"><span class="mw-cite-backlink"><b><a href="#cite_ref-54">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://spectrum.ieee.org/facebook-ai-launches-its-deepfake-detection-challenge">"Full Page Reload"</a>. <i>IEEE Spectrum: Technology, Engineering, and Science News</i>. 11 December 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200626124300/https://spectrum.ieee.org/tech-talk/artificial-intelligence/machine-learning/facebook-ai-launches-its-deepfake-detection-challenge">Archived</a> from the original on 26 June 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=IEEE+Spectrum%3A+Technology%2C+Engineering%2C+and+Science+News&amp;rft.atitle=Full+Page+Reload&amp;rft.date=2019-12-11&amp;rft_id=https%3A%2F%2Fspectrum.ieee.org%2Ffacebook-ai-launches-its-deepfake-detection-challenge&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-55"><span class="mw-cite-backlink"><b><a href="#cite_ref-55">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="http://ai.googleblog.com/2019/09/contributing-data-to-deepfake-detection.html">"Contributing Data to Deepfake Detection Research"</a>. 24 September 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200205104836/https://ai.googleblog.com/2019/09/contributing-data-to-deepfake-detection.html">Archived</a> from the original on 5 February 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">8 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Contributing+Data+to+Deepfake+Detection+Research&amp;rft.date=2019-09-24&amp;rft_id=http%3A%2F%2Fai.googleblog.com%2F2019%2F09%2Fcontributing-data-to-deepfake-detection.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-56"><span class="mw-cite-backlink"><b><a href="#cite_ref-56">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFThalen" class="citation web cs1">Thalen, Mikael. 
<a rel="nofollow" class="external text" href="https://www.dailydot.com/debug/impressions-deepfake-app/">"You can now deepfake yourself into a celebrity with just a few clicks"</a>. <i>daily dot</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200406221457/https://www.dailydot.com/debug/impressions-deepfake-app/">Archived</a> from the original on 6 April 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">3 April</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=daily+dot&amp;rft.atitle=You+can+now+deepfake+yourself+into+a+celebrity+with+just+a+few+clicks&amp;rft.aulast=Thalen&amp;rft.aufirst=Mikael&amp;rft_id=https%3A%2F%2Fwww.dailydot.com%2Fdebug%2Fimpressions-deepfake-app%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-57"><span class="mw-cite-backlink"><b><a href="#cite_ref-57">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMatthews2020" class="citation web cs1">Matthews, Zane (6 March 2020). <a rel="nofollow" class="external text" href="https://kool1079.com/fun-or-fear-deepfake-app-puts-celebrity-faces-in-your-selfies/">"Fun or Fear: Deepfake App Puts Celebrity Faces In Your Selfies"</a>. <i>Kool1079</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200324141612/https://kool1079.com/fun-or-fear-deepfake-app-puts-celebrity-faces-in-your-selfies/">Archived</a> from the original on 24 March 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">6 March</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Kool1079&amp;rft.atitle=Fun+or+Fear%3A+Deepfake+App+Puts+Celebrity+Faces+In+Your+Selfies&amp;rft.date=2020-03-06&amp;rft.aulast=Matthews&amp;rft.aufirst=Zane&amp;rft_id=https%3A%2F%2Fkool1079.com%2Ffun-or-fear-deepfake-app-puts-celebrity-faces-in-your-selfies%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-58"><span class="mw-cite-backlink"><b><a href="#cite_ref-58">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.bbc.com/news/entertainment-arts-54753214">"Kanye West, Kim Kardashian and her dad: Should we make holograms of the dead?"</a>. <i>BBC News</i>. 31 October 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201115150249/https://www.bbc.com/news/entertainment-arts-54753214">Archived</a> from the original on 15 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">11 November</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=BBC+News&amp;rft.atitle=Kanye+West%2C+Kim+Kardashian+and+her+dad%3A+Should+we+make+holograms+of+the+dead%3F&amp;rft.date=2020-10-31&amp;rft_id=https%3A%2F%2Fwww.bbc.com%2Fnews%2Fentertainment-arts-54753214&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-59"><span class="mw-cite-backlink"><b><a href="#cite_ref-59">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.themodems.com/post/kanye-west-gave-kim-kardashian-a-hologram-of-her-father-for-her-birthday">"Kanye West Gave Kim Kardashian a Hologram of Her Father for Her Birthday"</a>. <i>themodems</i>. 30 October 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201111121855/https://www.themodems.com/post/kanye-west-gave-kim-kardashian-a-hologram-of-her-father-for-her-birthday">Archived</a> from the original on 11 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">11 November</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=themodems&amp;rft.atitle=Kanye+West+Gave+Kim+Kardashian+a+Hologram+of+Her+Father+for+Her+Birthday&amp;rft.date=2020-10-30&amp;rft_id=https%3A%2F%2Fwww.themodems.com%2Fpost%2Fkanye-west-gave-kim-kardashian-a-hologram-of-her-father-for-her-birthday&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-60"><span class="mw-cite-backlink"><b><a href="#cite_ref-60">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://adage.com/article/advertising/parkland-victim-joaquin-oliver-comes-back-life-heartbreaking-plea-voters/2285166">"Parkland victim Joaquin Oliver comes back to life in heartbreaking plea to voters"</a>. <i>adage.com</i>. 2 October 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201111061000/https://adage.com/article/advertising/parkland-victim-joaquin-oliver-comes-back-life-heartbreaking-plea-voters/2285166">Archived</a> from the original on 11 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">11 November</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=adage.com&amp;rft.atitle=Parkland+victim+Joaquin+Oliver+comes+back+to+life+in+heartbreaking+plea+to+voters&amp;rft.date=2020-10-02&amp;rft_id=https%3A%2F%2Fadage.com%2Farticle%2Fadvertising%2Fparkland-victim-joaquin-oliver-comes-back-life-heartbreaking-plea-voters%2F2285166&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-61"><span class="mw-cite-backlink"><b><a href="#cite_ref-61">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBowenbank2022" class="citation magazine cs1">Bowenbank, Starr (14 September 2022). <a rel="nofollow" class="external text" href="https://www.billboard.com/culture/tv-film/simon-cowell-duet-elvis-deepfake-agt-performance-1235138799/">"Simon Cowell Duets With Elvis in Metaphysic's Latest Deepfake 'AGT' Performance: Watch"</a>. <i>Billboard</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220915081446/https://www.billboard.com/culture/tv-film/simon-cowell-duet-elvis-deepfake-agt-performance-1235138799/">Archived</a> from the original on 15 September 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Billboard&amp;rft.atitle=Simon+Cowell+Duets+With+Elvis+in+Metaphysic%27s+Latest+Deepfake+%27AGT%27+Performance%3A+Watch&amp;rft.date=2022-09-14&amp;rft.aulast=Bowenbank&amp;rft.aufirst=Starr&amp;rft_id=https%3A%2F%2Fwww.billboard.com%2Fculture%2Ftv-film%2Fsimon-cowell-duet-elvis-deepfake-agt-performance-1235138799%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-62"><span class="mw-cite-backlink"><b><a href="#cite_ref-62">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.youtube.com/watch?v=Oz9R82vWw08">"John Lennon 'One Laptop per Child' Commecial"</a>. <i><a href="/wiki/YouTube" title="YouTube">YouTube</a></i>. 26 December 2008. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20230309213906/https://www.youtube.com/watch?v=Oz9R82vWw08">Archived</a> from the original on 9 March 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 March</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=YouTube&amp;rft.atitle=John+Lennon+%27One+Laptop+per+Child%27+Commecial&amp;rft.date=2008-12-26&amp;rft_id=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DOz9R82vWw08&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-63"><span class="mw-cite-backlink"><b><a href="#cite_ref-63">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFZucconi2018" class="citation web cs1">Zucconi, Alan (14 March 2018). <a rel="nofollow" class="external text" href="https://www.alanzucconi.com/2018/03/14/understanding-the-technology-behind-deepfakes/">"Understanding the Technology Behind DeepFakes"</a>. <i>Alan Zucconi</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191101164537/https://www.alanzucconi.com/2018/03/14/understanding-the-technology-behind-deepfakes/">Archived</a> from the original on 1 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Alan+Zucconi&amp;rft.atitle=Understanding+the+Technology+Behind+DeepFakes&amp;rft.date=2018-03-14&amp;rft.aulast=Zucconi&amp;rft.aufirst=Alan&amp;rft_id=https%3A%2F%2Fwww.alanzucconi.com%2F2018%2F03%2F14%2Funderstanding-the-technology-behind-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-64"><span class="mw-cite-backlink"><b><a href="#cite_ref-64">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://web.archive.org/web/20220626181456/https://blog.synthesys.io/what-is-deepfake/">"What is a Deepfake?"</a>. <i>Blog - Synthesys</i>. 3 May 2022. Archived from <a rel="nofollow" class="external text" href="https://blog.synthesys.io/what-is-deepfake/">the original</a> on 26 June 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">17 May</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Blog+-+Synthesys&amp;rft.atitle=What+is+a+Deepfake%3F&amp;rft.date=2022-05-03&amp;rft_id=https%3A%2F%2Fblog.synthesys.io%2Fwhat-is-deepfake%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Wired-1059-65"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wired-1059_65-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wired-1059_65-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation magazine cs1"><a rel="nofollow" class="external text" href="https://www.wired.com/story/these-new-tricks-can-outsmart-deepfake-videosfor-now/">"These New Tricks Can Outsmart Deepfake Videos—for Now"</a>. <i>Wired</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1059-1028">1059-1028</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191003055035/https://www.wired.com/story/these-new-tricks-can-outsmart-deepfake-videosfor-now/">Archived</a> from the original on 3 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Wired&amp;rft.atitle=These+New+Tricks+Can+Outsmart+Deepfake+Videos%E2%80%94for+Now&amp;rft.issn=1059-1028&amp;rft_id=https%3A%2F%2Fwww.wired.com%2Fstory%2Fthese-new-tricks-can-outsmart-deepfake-videosfor-now%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-66"><span class="mw-cite-backlink"><b><a href="#cite_ref-66">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKemp2019" class="citation news cs1">Kemp, Luke (8 July 2019). <a rel="nofollow" class="external text" href="https://www.theguardian.com/film/2019/jul/03/in-the-age-of-deepfakes-could-virtual-actors-put-humans-out-of-business">"In the age of deepfakes, could virtual actors put humans out of business?"</a>. <i>The Guardian</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0261-3077">0261-3077</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191020223601/https://www.theguardian.com/film/2019/jul/03/in-the-age-of-deepfakes-could-virtual-actors-put-humans-out-of-business">Archived</a> from the original on 20 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=In+the+age+of+deepfakes%2C+could+virtual+actors+put+humans+out+of+business%3F&amp;rft.date=2019-07-08&amp;rft.issn=0261-3077&amp;rft.aulast=Kemp&amp;rft.aufirst=Luke&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Ffilm%2F2019%2Fjul%2F03%2Fin-the-age-of-deepfakes-could-virtual-actors-put-humans-out-of-business&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-67"><span class="mw-cite-backlink"><b><a href="#cite_ref-67">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFVerma2023" class="citation news cs1">Verma, Pranshu (21 July 2023). <a rel="nofollow" class="external text" href="https://www.washingtonpost.com/technology/2023/07/19/ai-actors-fear-sag-strike-hollywood/">"Digital clones made by AI tech could make Hollywood extras obsolete"</a>. <i>Washington Post</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20230720224000/https://www.washingtonpost.com/technology/2023/07/19/ai-actors-fear-sag-strike-hollywood/">Archived</a> from the original on 20 July 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Washington+Post&amp;rft.atitle=Digital+clones+made+by+AI+tech+could+make+Hollywood+extras+obsolete&amp;rft.date=2023-07-21&amp;rft.aulast=Verma&amp;rft.aufirst=Pranshu&amp;rft_id=https%3A%2F%2Fwww.washingtonpost.com%2Ftechnology%2F2023%2F07%2F19%2Fai-actors-fear-sag-strike-hollywood%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-High-2020-68"><span class="mw-cite-backlink">^ <a href="#cite_ref-High-2020_68-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-High-2020_68-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://studios.disneyresearch.com/2020/06/29/high-resolution-neural-face-swapping-for-visual-effects/">"High-Resolution Neural Face Swapping for Visual Effects | Disney Research Studios"</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201127101746/https://studios.disneyresearch.com/2020/06/29/high-resolution-neural-face-swapping-for-visual-effects/">Archived</a> from the original on 27 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=High-Resolution+Neural+Face+Swapping+for+Visual+Effects+%7C+Disney+Research+Studios&amp;rft_id=https%3A%2F%2Fstudios.disneyresearch.com%2F2020%2F06%2F29%2Fhigh-resolution-neural-face-swapping-for-visual-effects%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Naruniec-2020-69"><span class="mw-cite-backlink">^ <a href="#cite_ref-Naruniec-2020_69-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Naruniec-2020_69-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://blooloop.com/news/disney-deepfake-face-swap-technology/">"Disney's deepfake technology could be used in film and TV"</a>. <i>Blooloop</i>. 21 July 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201112044743/https://blooloop.com/news/disney-deepfake-face-swap-technology/">Archived</a> from the original on 12 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Blooloop&amp;rft.atitle=Disney%27s+deepfake+technology+could+be+used+in+film+and+TV&amp;rft.date=2020-07-21&amp;rft_id=https%3A%2F%2Fblooloop.com%2Fnews%2Fdisney-deepfake-face-swap-technology%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-70"><span class="mw-cite-backlink"><b><a href="#cite_ref-70">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLindley2020" class="citation web cs1">Lindley, Jon A. (2 July 2020). <a rel="nofollow" class="external text" href="https://www.techtimes.com/articles/250776/20200702/disney-is-using-deepfakes-and-facial-recognition-to-bring-back-dead-actors.htm">"Disney Ventures Into Bringing Back 'Dead Actors' Through Facial Recognition"</a>. <i>Tech Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210714032907/https://www.techtimes.com/articles/250776/20200702/disney-is-using-deepfakes-and-facial-recognition-to-bring-back-dead-actors.htm">Archived</a> from the original on 14 July 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Tech+Times&amp;rft.atitle=Disney+Ventures+Into+Bringing+Back+%27Dead+Actors%27+Through+Facial+Recognition&amp;rft.date=2020-07-02&amp;rft.aulast=Lindley&amp;rft.aufirst=Jon+A.&amp;rft_id=https%3A%2F%2Fwww.techtimes.com%2Farticles%2F250776%2F20200702%2Fdisney-is-using-deepfakes-and-facial-recognition-to-bring-back-dead-actors.htm&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-71"><span class="mw-cite-backlink"><b><a href="#cite_ref-71">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRadulovic2018" class="citation web cs1">Radulovic, Petrana (17 October 2018). <a rel="nofollow" class="external text" href="https://www.polygon.com/2018/10/17/17989214/harrison-ford-solo-movie-deepfake-technology">"Harrison Ford is the star of Solo: A Star Wars Story thanks to deepfake technology"</a>. <i>Polygon</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191020223601/https://www.polygon.com/2018/10/17/17989214/harrison-ford-solo-movie-deepfake-technology">Archived</a> from the original on 20 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Polygon&amp;rft.atitle=Harrison+Ford+is+the+star+of+Solo%3A+A+Star+Wars+Story+thanks+to+deepfake+technology&amp;rft.date=2018-10-17&amp;rft.aulast=Radulovic&amp;rft.aufirst=Petrana&amp;rft_id=https%3A%2F%2Fwww.polygon.com%2F2018%2F10%2F17%2F17989214%2Fharrison-ford-solo-movie-deepfake-technology&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-72"><span class="mw-cite-backlink"><b><a href="#cite_ref-72">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWinick" class="citation web cs1">Winick, Erin. <a rel="nofollow" class="external text" href="https://www.technologyreview.com/s/612241/how-acting-as-carrie-fishers-puppet-made-a-career-for-rogue-ones-princess-leia/">"How acting as Carrie Fisher's puppet made a career for Rogue One's Princess Leia"</a>. <i>MIT Technology Review</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191023063609/https://www.technologyreview.com/s/612241/how-acting-as-carrie-fishers-puppet-made-a-career-for-rogue-ones-princess-leia/">Archived</a> from the original on 23 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MIT+Technology+Review&amp;rft.atitle=How+acting+as+Carrie+Fisher%27s+puppet+made+a+career+for+Rogue+One%27s+Princess+Leia&amp;rft.aulast=Winick&amp;rft.aufirst=Erin&amp;rft_id=https%3A%2F%2Fwww.technologyreview.com%2Fs%2F612241%2Fhow-acting-as-carrie-fishers-puppet-made-a-career-for-rogue-ones-princess-leia%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-73"><span class="mw-cite-backlink"><b><a href="#cite_ref-73">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.gq-magazine.co.uk/culture/article/boba-fett-luke-skywalker">"Deepfake Luke Skywalker is another step down a ghoulish CGI path"</a>. <i>British GQ</i>. 10 February 2022. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220522214428/https://www.gq-magazine.co.uk/culture/article/boba-fett-luke-skywalker">Archived</a> from the original on 22 May 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 June</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=British+GQ&amp;rft.atitle=Deepfake+Luke+Skywalker+is+another+step+down+a+ghoulish+CGI+path&amp;rft.date=2022-02-10&amp;rft_id=https%3A%2F%2Fwww.gq-magazine.co.uk%2Fculture%2Farticle%2Fboba-fett-luke-skywalker&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-74"><span class="mw-cite-backlink"><b><a href="#cite_ref-74">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDazed2022" class="citation web cs1">Dazed (10 February 2022). <a rel="nofollow" class="external text" href="https://www.dazeddigital.com/science-tech/article/55429/1/deepfake-museum-of-moving-image-media-unstable-evidence-on-screen">"Will deepfakes rewrite history as we know it?"</a>. <i>Dazed</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220608224035/https://www.dazeddigital.com/science-tech/article/55429/1/deepfake-museum-of-moving-image-media-unstable-evidence-on-screen">Archived</a> from the original on 8 June 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 June</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Dazed&amp;rft.atitle=Will+deepfakes+rewrite+history+as+we+know+it%3F&amp;rft.date=2022-02-10&amp;rft.au=Dazed&amp;rft_id=https%3A%2F%2Fwww.dazeddigital.com%2Fscience-tech%2Farticle%2F55429%2F1%2Fdeepfake-museum-of-moving-image-media-unstable-evidence-on-screen&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-75"><span class="mw-cite-backlink"><b><a href="#cite_ref-75">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSchwartzel2023" class="citation web cs1">Schwartzel, Erich (21 December 2023). <a rel="nofollow" class="external text" href="https://www.wsj.com/tech/ai/behind-the-making-of-my-ai-digital-double-0ff22ac8">"Behind the Making of My AI Digital Double"</a>. <i>Wall Street Journal</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240106092450/https://www.wsj.com/tech/ai/behind-the-making-of-my-ai-digital-double-0ff22ac8">Archived</a> from the original on 6 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Wall+Street+Journal&amp;rft.atitle=Behind+the+Making+of+My+AI+Digital+Double&amp;rft.date=2023-12-21&amp;rft.aulast=Schwartzel&amp;rft.aufirst=Erich&amp;rft_id=https%3A%2F%2Fwww.wsj.com%2Ftech%2Fai%2Fbehind-the-making-of-my-ai-digital-double-0ff22ac8&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-76"><span class="mw-cite-backlink"><b><a href="#cite_ref-76">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCoffee2023" class="citation web cs1">Coffee, Patrick (18 June 2023). <a rel="nofollow" class="external text" href="https://www.wsj.com/amp/articles/ai-deepfakes-celebrity-marketing-brands-81381aa6">"Celebrities Use AI to Take Control of Their Own Images"</a>. <i>Wall Street Journal</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171921/https://www.wsj.com/articles/ai-deepfakes-celebrity-marketing-brands-81381aa6">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Wall+Street+Journal&amp;rft.atitle=Celebrities+Use+AI+to+Take+Control+of+Their+Own+Images&amp;rft.date=2023-06-18&amp;rft.aulast=Coffee&amp;rft.aufirst=Patrick&amp;rft_id=https%3A%2F%2Fwww.wsj.com%2Famp%2Farticles%2Fai-deepfakes-celebrity-marketing-brands-81381aa6&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-77"><span class="mw-cite-backlink"><b><a href="#cite_ref-77">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPrescott2024" class="citation news cs1">Prescott, Katie Prescott (23 August 2024). <a rel="nofollow" class="external text" href="https://www.thetimes.com/business-money/companies/article/the-man-who-creates-fake-people-like-david-beckham-speaking-nine-languages-flvbxmpw3">"The man who creates fake people – like David Beckham speaking nine languages"</a>. <i><a href="/wiki/The_Times" title="The Times">The Times</a></i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">22 October</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Times&amp;rft.atitle=The+man+who+creates+fake+people+%E2%80%93+like+David+Beckham+speaking+nine+languages&amp;rft.date=2024-08-23&amp;rft.aulast=Prescott&amp;rft.aufirst=Katie+Prescott&amp;rft_id=https%3A%2F%2Fwww.thetimes.com%2Fbusiness-money%2Fcompanies%2Farticle%2Fthe-man-who-creates-fake-people-like-david-beckham-speaking-nine-languages-flvbxmpw3&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-78"><span class="mw-cite-backlink"><b><a href="#cite_ref-78">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://english.tupaki.com/amp/entertainment/vijay-younger-version-in-goat-1383681">"Not Vijay, Here's Who Played the Younger Version of Him in The GOAT"</a>. <i>english.tupaki.com/</i>. 8 September 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=english.tupaki.com%2F&amp;rft.atitle=Not+Vijay%2C+Here%27s+Who+Played+the+Younger+Version+of+Him+in+The+GOAT&amp;rft.date=2024-09-08&amp;rft_id=https%3A%2F%2Fenglish.tupaki.com%2Famp%2Fentertainment%2Fvijay-younger-version-in-goat-1383681&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-79"><span class="mw-cite-backlink"><b><a href="#cite_ref-79">^</a></b></span> <span class="reference-text">Katerina Cizek, William Uricchio, and Sarah Wolozin: Collective Wisdom | Massachusetts Institute of Technology <a rel="nofollow" class="external autonumber" href="https://wip.mitpress.mit.edu/pub/collective-wisdom-part-6">[1]</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200304015015/https://wip.mitpress.mit.edu/pub/collective-wisdom-part-6">Archived</a> 4 March 2020 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a></span> </li> <li id="cite_note-80"><span class="mw-cite-backlink"><b><a href="#cite_ref-80">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="http://www.ansa.it/toscana/notizie/2017/11/03/ornella-muti-in-cortometraggio-a-firenze_36349008-ce7b-4c7e-8742-43e28f7225f4.html">"ANSA | Ornella Muti in cortometraggio a Firenze"</a>. 3 November 2017. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200227220711/http://www.ansa.it/toscana/notizie/2017/11/03/ornella-muti-in-cortometraggio-a-firenze_36349008-ce7b-4c7e-8742-43e28f7225f4.html">Archived</a> from the original on 27 February 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">27 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=ANSA+%7C+Ornella+Muti+in+cortometraggio+a+Firenze&amp;rft.date=2017-11-03&amp;rft_id=http%3A%2F%2Fwww.ansa.it%2Ftoscana%2Fnotizie%2F2017%2F11%2F03%2Fornella-muti-in-cortometraggio-a-firenze_36349008-ce7b-4c7e-8742-43e28f7225f4.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-81"><span class="mw-cite-backlink"><b><a href="#cite_ref-81">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.nme.com/news/tv/south-park-creators-launch-new-deepfake-satire-series-sassy-justice-2800657">"<span class="cs1-kern-left"></span>'South Park' creators launch new deepfake satire series 'Sassy Justice'<span class="cs1-kern-right"></span>"</a>. <i>NME</i>. 27 October 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172120/https://www.nme.com/news/tv/south-park-creators-launch-new-deepfake-satire-series-sassy-justice-2800657">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 June</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=NME&amp;rft.atitle=%27South+Park%27+creators+launch+new+deepfake+satire+series+%27Sassy+Justice%27&amp;rft.date=2020-10-27&amp;rft_id=https%3A%2F%2Fwww.nme.com%2Fnews%2Ftv%2Fsouth-park-creators-launch-new-deepfake-satire-series-sassy-justice-2800657&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-CRS1-82"><span class="mw-cite-backlink">^ <a href="#cite_ref-CRS1_82-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-CRS1_82-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFTaylerHarris2021" class="citation report cs1">Tayler, Kelley M.; Harris, Laurie A. (8 June 2021). <a rel="nofollow" class="external text" href="https://crsreports.congress.gov/product/pdf/IF/IF11333">Deep Fakes and National Security</a> (Report). <a href="/wiki/Congressional_Research_Service" title="Congressional Research Service">Congressional Research Service</a>. p.&#160;1. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220614195611/https://crsreports.congress.gov/product/pdf/IF/IF11333">Archived</a> from the original on 14 June 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">19 July</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=report&amp;rft.btitle=Deep+Fakes+and+National+Security&amp;rft.pages=1&amp;rft.pub=Congressional+Research+Service&amp;rft.date=2021-06-08&amp;rft.aulast=Tayler&amp;rft.aufirst=Kelley+M.&amp;rft.au=Harris%2C+Laurie+A.&amp;rft_id=https%3A%2F%2Fcrsreports.congress.gov%2Fproduct%2Fpdf%2FIF%2FIF11333&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-83"><span class="mw-cite-backlink"><b><a href="#cite_ref-83">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLimberg2020" class="citation web cs1">Limberg, Peter (24 May 2020). <a rel="nofollow" class="external text" href="https://cultstate.com/2020/05/24/Podcast-18--Blackmail-Inflation/">"Blackmail Inflation"</a>. <i>CultState</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210124155722/https://cultstate.com/2020/05/24/Podcast-18--Blackmail-Inflation/">Archived</a> from the original on 24 January 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 January</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=CultState&amp;rft.atitle=Blackmail+Inflation&amp;rft.date=2020-05-24&amp;rft.aulast=Limberg&amp;rft.aufirst=Peter&amp;rft_id=https%3A%2F%2Fcultstate.com%2F2020%2F05%2F24%2FPodcast-18--Blackmail-Inflation%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-84"><span class="mw-cite-backlink"><b><a href="#cite_ref-84">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://t.me/forKappy">"For Kappy"</a>. <i>Telegraph</i>. 24 May 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210124155641/https://t.me/forKappy">Archived</a> from the original on 24 January 2021<span class="reference-accessdate">. Retrieved <span class="nowrap">18 January</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Telegraph&amp;rft.atitle=For+Kappy&amp;rft.date=2020-05-24&amp;rft_id=https%3A%2F%2Ft.me%2FforKappy&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-85"><span class="mw-cite-backlink"><b><a href="#cite_ref-85">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.nbc.com/nbc-insider/agt-2022-see-the-judges-reactions-to-simon-cowell-singing">"The AGT Judges Had Priceless Reactions to That Simon Cowell Singing Audition"</a>. <i>NBC Insider Official Site</i>. 8 June 2022. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20220829084747/https://www.nbc.com/nbc-insider/agt-2022-see-the-judges-reactions-to-simon-cowell-singing">Archived</a> from the original on 29 August 2022<span class="reference-accessdate">. Retrieved <span class="nowrap">29 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=NBC+Insider+Official+Site&amp;rft.atitle=The+AGT+Judges+Had+Priceless+Reactions+to+That+Simon+Cowell+Singing+Audition&amp;rft.date=2022-06-08&amp;rft_id=https%3A%2F%2Fwww.nbc.com%2Fnbc-insider%2Fagt-2022-see-the-judges-reactions-to-simon-cowell-singing&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-86"><span class="mw-cite-backlink"><b><a href="#cite_ref-86">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMarr" class="citation web cs1">Marr, Bernard. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/bernardmarr/2022/08/30/can-a-metaverse-ai-win-americas-got-talent-and-what-that-means-for-the-industry/">"Can A Metaverse AI Win America's Got Talent? (And What That Means For The Industry)"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220830073329/https://www.forbes.com/sites/bernardmarr/2022/08/30/can-a-metaverse-ai-win-americas-got-talent-and-what-that-means-for-the-industry/">Archived</a> from the original on 30 August 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=Can+A+Metaverse+AI+Win+America%27s+Got+Talent%3F+%28And+What+That+Means+For+The+Industry%29&amp;rft.aulast=Marr&amp;rft.aufirst=Bernard&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fbernardmarr%2F2022%2F08%2F30%2Fcan-a-metaverse-ai-win-americas-got-talent-and-what-that-means-for-the-industry%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-87"><span class="mw-cite-backlink"><b><a href="#cite_ref-87">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMorales2022" class="citation web cs1">Morales, Jowi (10 June 2022). <a rel="nofollow" class="external text" href="https://www.makeuseof.com/deepfakes-mainstream-agt-entry/">"Deepfakes Go Mainstream: How Metaphysic's AGT Entry Will Impact Entertainment"</a>. <i>MUO</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172124/https://www.makeuseof.com/deepfakes-mainstream-agt-entry/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MUO&amp;rft.atitle=Deepfakes+Go+Mainstream%3A+How+Metaphysic%27s+AGT+Entry+Will+Impact+Entertainment&amp;rft.date=2022-06-10&amp;rft.aulast=Morales&amp;rft.aufirst=Jowi&amp;rft_id=https%3A%2F%2Fwww.makeuseof.com%2Fdeepfakes-mainstream-agt-entry%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-88"><span class="mw-cite-backlink"><b><a href="#cite_ref-88">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCarter2019" class="citation web cs1">Carter, Rebecca (1 June 2019). <a rel="nofollow" class="external text" href="https://www.entertainmentdaily.co.uk/tv/bgt-viewers-slam-simon-cowell-for-rude-and-nasty-remark-to-contestant/">"BGT viewers slam Simon Cowell for 'rude' and 'nasty' remark to contestant"</a>. <i>Entertainment Daily</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220831125322/https://www.entertainmentdaily.co.uk/tv/bgt-viewers-slam-simon-cowell-for-rude-and-nasty-remark-to-contestant/">Archived</a> from the original on 31 August 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">31 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Entertainment+Daily&amp;rft.atitle=BGT+viewers+slam+Simon+Cowell+for+%27rude%27+and+%27nasty%27+remark+to+contestant&amp;rft.date=2019-06-01&amp;rft.aulast=Carter&amp;rft.aufirst=Rebecca&amp;rft_id=https%3A%2F%2Fwww.entertainmentdaily.co.uk%2Ftv%2Fbgt-viewers-slam-simon-cowell-for-rude-and-nasty-remark-to-contestant%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-89"><span class="mw-cite-backlink"><b><a href="#cite_ref-89">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation cs2"><a rel="nofollow" class="external text" href="https://www.youtube.com/watch?v=mPU0WNUzsBo&amp;ab_channel=America%27sGotTalent"><i>Simon Cowell Sings on Stage?! Metaphysic Will Leave You Speechless | AGT 2022</i></a>, <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220829084747/https://www.youtube.com/watch?v=mPU0WNUzsBo&amp;ab_channel=America%27sGotTalent">archived</a> from the original on 29 August 2022<span class="reference-accessdate">, retrieved <span class="nowrap">29 August</span> 2022</span></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Simon+Cowell+Sings+on+Stage%3F%21+Metaphysic+Will+Leave+You+Speechless+%7C+AGT+2022&amp;rft_id=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DmPU0WNUzsBo%26ab_channel%3DAmerica%2527sGotTalent&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-90"><span class="mw-cite-backlink"><b><a href="#cite_ref-90">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite 
id="CITEREFSegarra" class="citation web cs1">Segarra, Edward. <a rel="nofollow" class="external text" href="https://www.usatoday.com/story/entertainment/tv/2022/08/30/agt-simon-cowell-calls-ai-opera-best-act-metaphysic/7947094001/">"<span class="cs1-kern-left"></span>'AGT' judges Simon Cowell, Howie Mandel get 'deepfake' treatment by AI act Metaphysic: Watch here"</a>. <i>USA TODAY</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220831053351/https://www.usatoday.com/story/entertainment/tv/2022/08/30/agt-simon-cowell-calls-ai-opera-best-act-metaphysic/7947094001/">Archived</a> from the original on 31 August 2022<span class="reference-accessdate">. Retrieved <span class="nowrap">31 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=USA+TODAY&amp;rft.atitle=%27AGT%27+judges+Simon+Cowell%2C+Howie+Mandel+get+%27deepfake%27+treatment+by+AI+act+Metaphysic%3A+Watch+here&amp;rft.aulast=Segarra&amp;rft.aufirst=Edward&amp;rft_id=https%3A%2F%2Fwww.usatoday.com%2Fstory%2Fentertainment%2Ftv%2F2022%2F08%2F30%2Fagt-simon-cowell-calls-ai-opera-best-act-metaphysic%2F7947094001%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-91"><span class="mw-cite-backlink"><b><a href="#cite_ref-91">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBowenbank2022" class="citation magazine cs1">Bowenbank, Starr (14 September 2022). <a rel="nofollow" class="external text" href="https://www.billboard.com/culture/tv-film/simon-cowell-duet-elvis-deepfake-agt-performance-1235138799/">"Simon Cowell Duets With Elvis in Metaphysic's Latest Deepfake 'AGT' Performance: Watch"</a>. <i>Billboard</i>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170142/https://www.billboard.com/culture/tv-film/simon-cowell-duet-elvis-deepfake-agt-performance-1235138799/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">15 September</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Billboard&amp;rft.atitle=Simon+Cowell+Duets+With+Elvis+in+Metaphysic%27s+Latest+Deepfake+%27AGT%27+Performance%3A+Watch&amp;rft.date=2022-09-14&amp;rft.aulast=Bowenbank&amp;rft.aufirst=Starr&amp;rft_id=https%3A%2F%2Fwww.billboard.com%2Fculture%2Ftv-film%2Fsimon-cowell-duet-elvis-deepfake-agt-performance-1235138799%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-kotaku-92"><span class="mw-cite-backlink"><b><a href="#cite_ref-kotaku_92-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFZwiezen2021" class="citation web cs1">Zwiezen, Zack (18 January 2021). <a rel="nofollow" class="external text" href="https://kotaku.com/this-website-lets-you-make-glados-say-whatever-you-want-1846062835">"Website Lets You Make GLaDOS Say Whatever You Want"</a>. <i><a href="/wiki/Kotaku" title="Kotaku">Kotaku</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210117164748/https://kotaku.com/this-website-lets-you-make-glados-say-whatever-you-want-1846062835">Archived</a> from the original on 17 January 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 January</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Kotaku&amp;rft.atitle=Website+Lets+You+Make+GLaDOS+Say+Whatever+You+Want&amp;rft.date=2021-01-18&amp;rft.aulast=Zwiezen&amp;rft.aufirst=Zack&amp;rft_id=https%3A%2F%2Fkotaku.com%2Fthis-website-lets-you-make-glados-say-whatever-you-want-1846062835&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-gameinformer-93"><span class="mw-cite-backlink"><b><a href="#cite_ref-gameinformer_93-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRuppert2021" class="citation magazine cs1">Ruppert, Liana (18 January 2021). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210118175543/https://www.gameinformer.com/gamer-culture/2021/01/18/make-portals-glados-and-other-beloved-characters-say-the-weirdest-things">"Make Portal's GLaDOS And Other Beloved Characters Say The Weirdest Things With This App"</a>. <i><a href="/wiki/Game_Informer" title="Game Informer">Game Informer</a></i>. Archived from <a rel="nofollow" class="external text" href="https://www.gameinformer.com/gamer-culture/2021/01/18/make-portals-glados-and-other-beloved-characters-say-the-weirdest-things">the original</a> on 18 January 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 January</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Game+Informer&amp;rft.atitle=Make+Portal%27s+GLaDOS+And+Other+Beloved+Characters+Say+The+Weirdest+Things+With+This+App&amp;rft.date=2021-01-18&amp;rft.aulast=Ruppert&amp;rft.aufirst=Liana&amp;rft_id=https%3A%2F%2Fwww.gameinformer.com%2Fgamer-culture%2F2021%2F01%2F18%2Fmake-portals-glados-and-other-beloved-characters-say-the-weirdest-things&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-pcgamer-94"><span class="mw-cite-backlink"><b><a href="#cite_ref-pcgamer_94-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFClayton2021" class="citation web cs1">Clayton, Natalie (19 January 2021). <a rel="nofollow" class="external text" href="https://www.pcgamer.com/make-the-cast-of-tf2-recite-old-memes-with-this-ai-text-to-speech-tool">"Make the cast of TF2 recite old memes with this AI text-to-speech tool"</a>. <i><a href="/wiki/PC_Gamer" title="PC Gamer">PC Gamer</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210119133726/https://www.pcgamer.com/make-the-cast-of-tf2-recite-old-memes-with-this-ai-text-to-speech-tool/">Archived</a> from the original on 19 January 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">19 January</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=PC+Gamer&amp;rft.atitle=Make+the+cast+of+TF2+recite+old+memes+with+this+AI+text-to-speech+tool&amp;rft.date=2021-01-19&amp;rft.aulast=Clayton&amp;rft.aufirst=Natalie&amp;rft_id=https%3A%2F%2Fwww.pcgamer.com%2Fmake-the-cast-of-tf2-recite-old-memes-with-this-ai-text-to-speech-tool&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-95"><span class="mw-cite-backlink"><b><a href="#cite_ref-95">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSherman2023" class="citation web cs1">Sherman, Maria (3 December 2023). <a rel="nofollow" class="external text" href="https://apnews.com/article/kiss-digital-avatars-end-of-road-finale-37a8ae9905099343c7b41654b2344d0c">"Kiss say farewell to live touring, become first US band to go virtual and become digital avatars"</a>. <i>AP News</i>. Associated Press. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240101212029/https://apnews.com/article/kiss-digital-avatars-end-of-road-finale-37a8ae9905099343c7b41654b2344d0c">Archived</a> from the original on 1 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=AP+News&amp;rft.atitle=Kiss+say+farewell+to+live+touring%2C+become+first+US+band+to+go+virtual+and+become+digital+avatars&amp;rft.date=2023-12-03&amp;rft.aulast=Sherman&amp;rft.aufirst=Maria&amp;rft_id=https%3A%2F%2Fapnews.com%2Farticle%2Fkiss-digital-avatars-end-of-road-finale-37a8ae9905099343c7b41654b2344d0c&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:0-96"><span class="mw-cite-backlink">^ <a href="#cite_ref-:0_96-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:0_96-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:0_96-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCerullo2024" class="citation news cs1">Cerullo, Megan (9 January 2024). <a rel="nofollow" class="external text" href="https://www.cbsnews.com/news/taylor-swift-le-creuset-ai-generated-ads/">"AI-generated ads using Taylor Swift's likeness dupe fans with fake Le Creuset giveaway"</a>. <i><a href="/wiki/CBS_News" title="CBS News">CBS News</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171010/https://www.cbsnews.com/news/taylor-swift-le-creuset-ai-generated-ads/">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=CBS+News&amp;rft.atitle=AI-generated+ads+using+Taylor+Swift%27s+likeness+dupe+fans+with+fake+Le+Creuset+giveaway&amp;rft.date=2024-01-09&amp;rft.aulast=Cerullo&amp;rft.aufirst=Megan&amp;rft_id=https%3A%2F%2Fwww.cbsnews.com%2Fnews%2Ftaylor-swift-le-creuset-ai-generated-ads%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:1-97"><span class="mw-cite-backlink">^ <a href="#cite_ref-:1_97-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:1_97-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:1_97-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-:1_97-3"><sup><i><b>d</b></i></sup></a> <a href="#cite_ref-:1_97-4"><sup><i><b>e</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHsuLu2024" class="citation news cs1">Hsu, Tiffany; Lu, Yiwen (9 January 2024). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2024/01/09/technology/taylor-swift-le-creuset-ai-deepfake.html">"No, That's Not Taylor Swift Peddling Le Creuset Cookware"</a>. <i><a href="/wiki/The_New_York_Times" title="The New York Times">The New York Times</a></i>. p.&#160;B1<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=No%2C+That%27s+Not+Taylor+Swift+Peddling+Le+Creuset+Cookware&amp;rft.pages=B1&amp;rft.date=2024-01-09&amp;rft.aulast=Hsu&amp;rft.aufirst=Tiffany&amp;rft.au=Lu%2C+Yiwen&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2024%2F01%2F09%2Ftechnology%2Ftaylor-swift-le-creuset-ai-deepfake.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:2-98"><span class="mw-cite-backlink">^ <a href="#cite_ref-:2_98-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:2_98-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:2_98-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFTaylor2023" class="citation news cs1">Taylor, Derrick Bryson (2 October 2023). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2023/10/02/technology/tom-hanks-ai-dental-video.html">"Tom Hanks Warns of Dental Ad Using A.I. Version of Him"</a>. <i>The New York Times</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0362-4331">0362-4331</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173649/https://www.nytimes.com/2023/10/02/technology/tom-hanks-ai-dental-video.html">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">12 October</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=Tom+Hanks+Warns+of+Dental+Ad+Using+A.I.+Version+of+Him&amp;rft.date=2023-10-02&amp;rft.issn=0362-4331&amp;rft.aulast=Taylor&amp;rft.aufirst=Derrick+Bryson&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2023%2F10%2F02%2Ftechnology%2Ftom-hanks-ai-dental-video.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:3-99"><span class="mw-cite-backlink">^ <a href="#cite_ref-:3_99-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:3_99-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:3_99-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJohnson2023" class="citation news cs1">Johnson, Kirsten (11 December 2023). <a rel="nofollow" class="external text" href="https://www.abc15.com/news/let-joe-know/arizona-woman-falls-victim-to-deep-fake-scam-using-celebrities-on-social-media">"Arizona woman falls victim to deepfake scam using celebrities on social media"</a>. <i><a href="/wiki/ABC_15_Arizona" class="mw-redirect" title="ABC 15 Arizona">ABC 15 Arizona</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171036/https://www.abc15.com/news/let-joe-know/arizona-woman-falls-victim-to-deep-fake-scam-using-celebrities-on-social-media">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=ABC+15+Arizona&amp;rft.atitle=Arizona+woman+falls+victim+to+deepfake+scam+using+celebrities+on+social+media&amp;rft.date=2023-12-11&amp;rft.aulast=Johnson&amp;rft.aufirst=Kirsten&amp;rft_id=https%3A%2F%2Fwww.abc15.com%2Fnews%2Flet-joe-know%2Farizona-woman-falls-victim-to-deep-fake-scam-using-celebrities-on-social-media&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:4-100"><span class="mw-cite-backlink">^ <a href="#cite_ref-:4_100-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:4_100-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKulundu2024" class="citation news cs1">Kulundu, Mary (4 January 2024). <a rel="nofollow" class="external text" href="https://factcheck.afp.com/doc.afp.com.349D4AG">"Deepfake videos of Elon Musk used in get-rich-quick scam"</a>. <i><a href="/wiki/Agence_France-Presse" title="Agence France-Presse">Agence France-Presse</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172125/https://factcheck.afp.com/doc.afp.com.349D4AG">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Agence+France-Presse&amp;rft.atitle=Deepfake+videos+of+Elon+Musk+used+in+get-rich-quick+scam&amp;rft.date=2024-01-04&amp;rft.aulast=Kulundu&amp;rft.aufirst=Mary&amp;rft_id=https%3A%2F%2Ffactcheck.afp.com%2Fdoc.afp.com.349D4AG&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-101"><span class="mw-cite-backlink"><b><a href="#cite_ref-101">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFEsmael2024" class="citation news cs1">Esmael, Lisbet (3 January 2024). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171010/https://www.cnnphilippines.com/news/2024/1/3/cybersecurity-deepfake-technology.html">"PH needs multifaceted approach vs 'deepfake' videos used to scam Pinoys"</a>. <i><a href="/wiki/CNN_Philippines" title="CNN Philippines">CNN Philippines</a></i>. Archived from <a rel="nofollow" class="external text" href="https://www.cnnphilippines.com/news/2024/1/3/cybersecurity-deepfake-technology.html">the original</a> on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=CNN+Philippines&amp;rft.atitle=PH+needs+multifaceted+approach+vs+%27deepfake%27+videos+used+to+scam+Pinoys&amp;rft.date=2024-01-03&amp;rft.aulast=Esmael&amp;rft.aufirst=Lisbet&amp;rft_id=https%3A%2F%2Fwww.cnnphilippines.com%2Fnews%2F2024%2F1%2F3%2Fcybersecurity-deepfake-technology.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-102"><span class="mw-cite-backlink"><b><a href="#cite_ref-102">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGerken2023" class="citation news cs1">Gerken, Tom (4 October 2023). <a rel="nofollow" class="external text" href="https://www.bbc.com/news/technology-66993651">"MrBeast and BBC stars used in deepfake scam videos"</a>. <i><a href="/wiki/BBC_News" title="BBC News">BBC News</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172122/https://www.bbc.com/news/technology-66993651">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=BBC+News&amp;rft.atitle=MrBeast+and+BBC+stars+used+in+deepfake+scam+videos&amp;rft.date=2023-10-04&amp;rft.aulast=Gerken&amp;rft.aufirst=Tom&amp;rft_id=https%3A%2F%2Fwww.bbc.com%2Fnews%2Ftechnology-66993651&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:5-103"><span class="mw-cite-backlink">^ <a href="#cite_ref-:5_103-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:5_103-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLim2023" class="citation news cs1">Lim, Kimberly (29 December 2023). <span class="id-lock-subscription" title="Paid subscription required"><a rel="nofollow" class="external text" href="https://www.scmp.com/week-asia/politics/article/3246701/singapore-pm-lee-warns-very-convincing-deepfakes-spreading-disinformation-after-fake-video-him">"Singapore PM Lee warns of 'very convincing' deepfakes 'spreading disinformation' after fake video of him emerges"</a></span>. <i><a href="/wiki/South_China_Morning_Post" title="South China Morning Post">South China Morning Post</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240109054044/https://www.scmp.com/week-asia/politics/article/3246701/singapore-pm-lee-warns-very-convincing-deepfakes-spreading-disinformation-after-fake-video-him">Archived</a> from the original on 9 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=South+China+Morning+Post&amp;rft.atitle=Singapore+PM+Lee+warns+of+%27very+convincing%27+deepfakes+%27spreading+disinformation%27+after+fake+video+of+him+emerges&amp;rft.date=2023-12-29&amp;rft.aulast=Lim&amp;rft.aufirst=Kimberly&amp;rft_id=https%3A%2F%2Fwww.scmp.com%2Fweek-asia%2Fpolitics%2Farticle%2F3246701%2Fsingapore-pm-lee-warns-very-convincing-deepfakes-spreading-disinformation-after-fake-video-him&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-104"><span class="mw-cite-backlink"><b><a href="#cite_ref-104">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFTaylor2023" class="citation news cs1">Taylor, Josh (30 November 2023). <a rel="nofollow" class="external text" href="https://www.theguardian.com/technology/2023/dec/01/scammer-paid-facebook-7c-per-view-to-circulate-video-of-deepfake-jim-chalmers-and-gina-rinehart">"Scammer paid Facebook 7c per view to circulate video of deepfake Jim Chalmers and Gina Rinehart"</a>. <i><a href="/wiki/The_Guardian" title="The Guardian">The Guardian</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170643/https://www.theguardian.com/technology/2023/dec/01/scammer-paid-facebook-7c-per-view-to-circulate-video-of-deepfake-jim-chalmers-and-gina-rinehart">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=Scammer+paid+Facebook+7c+per+view+to+circulate+video+of+deepfake+Jim+Chalmers+and+Gina+Rinehart&amp;rft.date=2023-11-30&amp;rft.aulast=Taylor&amp;rft.aufirst=Josh&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Ftechnology%2F2023%2Fdec%2F01%2Fscammer-paid-facebook-7c-per-view-to-circulate-video-of-deepfake-jim-chalmers-and-gina-rinehart&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-105"><span class="mw-cite-backlink"><b><a href="#cite_ref-105">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPalmer2023" class="citation news cs1">Palmer, Joseph Olbrycht (14 December 2023). <a rel="nofollow" class="external text" href="https://factcheck.afp.com/doc.afp.com.34766ZF">"Deepfake of Australian treasury, central bank officials used to promote investment scam"</a>. <i><a href="/wiki/Agence_France-Presse" title="Agence France-Presse">Agence France-Presse</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171036/https://factcheck.afp.com/doc.afp.com.34766ZF">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Agence+France-Presse&amp;rft.atitle=Deepfake+of+Australian+treasury%2C+central+bank+officials+used+to+promote+investment+scam&amp;rft.date=2023-12-14&amp;rft.aulast=Palmer&amp;rft.aufirst=Joseph+Olbrycht&amp;rft_id=https%3A%2F%2Ffactcheck.afp.com%2Fdoc.afp.com.34766ZF&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:6-106"><span class="mw-cite-backlink">^ <a href="#cite_ref-:6_106-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:6_106-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:6_106-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKoebler2024" class="citation news cs1">Koebler, Jason (9 January 2024). <a rel="nofollow" class="external text" href="https://www.404media.co/joe-rogan-taylor-swift-andrew-tate-ai-deepfake-youtube-medicare-ads/">"Deepfaked Celebrity Ads Promoting Medicare Scams Run Rampant on YouTube"</a>. <i><a href="/wiki/404_Media" title="404 Media">404 Media</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171016/https://www.404media.co/joe-rogan-taylor-swift-andrew-tate-ai-deepfake-youtube-medicare-ads/">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=404+Media&amp;rft.atitle=Deepfaked+Celebrity+Ads+Promoting+Medicare+Scams+Run+Rampant+on+YouTube&amp;rft.date=2024-01-09&amp;rft.aulast=Koebler&amp;rft.aufirst=Jason&amp;rft_id=https%3A%2F%2Fwww.404media.co%2Fjoe-rogan-taylor-swift-andrew-tate-ai-deepfake-youtube-medicare-ads%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-:7-107"><span class="mw-cite-backlink">^ <a href="#cite_ref-:7_107-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-:7_107-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-:7_107-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRosenblatt2023" class="citation news cs1">Rosenblatt, Kalhan (3 October 2023). <a rel="nofollow" class="external text" href="https://www.nbcnews.com/tech/mrbeast-ai-tiktok-ad-deepfake-rcna118596">"MrBeast calls TikTok ad showing an AI version of him a 'scam'<span class="cs1-kern-right"></span>"</a>. <i><a href="/wiki/NBC_News" title="NBC News">NBC News</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171010/https://www.nbcnews.com/tech/mrbeast-ai-tiktok-ad-deepfake-rcna118596">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=NBC+News&amp;rft.atitle=MrBeast+calls+TikTok+ad+showing+an+AI+version+of+him+a+%27scam%27&amp;rft.date=2023-10-03&amp;rft.aulast=Rosenblatt&amp;rft.aufirst=Kalhan&amp;rft_id=https%3A%2F%2Fwww.nbcnews.com%2Ftech%2Fmrbeast-ai-tiktok-ad-deepfake-rcna118596&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-108"><span class="mw-cite-backlink"><b><a href="#cite_ref-108">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKoebler2024" class="citation news cs1">Koebler, Jason (25 January 2024). <a rel="nofollow" class="external text" href="https://www.404media.co/youtube-deletes-1-000-videos-of-celebrity-ai-scam-ads/">"YouTube Deletes 1,000 Videos of Celebrity AI Scam Ads"</a>. <i><a href="/wiki/404_Media" title="404 Media">404 Media</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171922/https://www.404media.co/youtube-deletes-1-000-videos-of-celebrity-ai-scam-ads/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">2 February</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=404+Media&amp;rft.atitle=YouTube+Deletes+1%2C000+Videos+of+Celebrity+AI+Scam+Ads&amp;rft.date=2024-01-25&amp;rft.aulast=Koebler&amp;rft.aufirst=Jason&amp;rft_id=https%3A%2F%2Fwww.404media.co%2Fyoutube-deletes-1-000-videos-of-celebrity-ai-scam-ads%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-109"><span class="mw-cite-backlink"><b><a href="#cite_ref-109">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBucci2023" class="citation news cs1">Bucci, Nino (27 November 2023). <a rel="nofollow" class="external text" href="https://www.theguardian.com/australia-news/2023/nov/27/dick-smith-criticises-facebook-after-scammers-circulate-deepfake-video-ad">"Dick Smith criticises Facebook after scammers circulate deepfake video ad"</a>. <i><a href="/wiki/The_Guardian" title="The Guardian">The Guardian</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170606/https://www.theguardian.com/australia-news/2023/nov/27/dick-smith-criticises-facebook-after-scammers-circulate-deepfake-video-ad">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=Dick+Smith+criticises+Facebook+after+scammers+circulate+deepfake+video+ad&amp;rft.date=2023-11-27&amp;rft.aulast=Bucci&amp;rft.aufirst=Nino&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Faustralia-news%2F2023%2Fnov%2F27%2Fdick-smith-criticises-facebook-after-scammers-circulate-deepfake-video-ad&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-110"><span class="mw-cite-backlink"><b><a href="#cite_ref-110">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLomas2023" class="citation news cs1">Lomas, Natasha (7 July 2023). <a rel="nofollow" class="external text" href="https://techcrunch.com/2023/07/07/martin-lewis-deepfake-scam-ad-facebook/">"Martin Lewis warns over 'first' deepfake video scam ad circulating on Facebook"</a>. <i><a href="/wiki/TechCrunch" title="TechCrunch">TechCrunch</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171010/https://techcrunch.com/2023/07/07/martin-lewis-deepfake-scam-ad-facebook/">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=TechCrunch&amp;rft.atitle=Martin+Lewis+warns+over+%27first%27+deepfake+video+scam+ad+circulating+on+Facebook&amp;rft.date=2023-07-07&amp;rft.aulast=Lomas&amp;rft.aufirst=Natasha&amp;rft_id=https%3A%2F%2Ftechcrunch.com%2F2023%2F07%2F07%2Fmartin-lewis-deepfake-scam-ad-facebook%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-111"><span class="mw-cite-backlink"><b><a href="#cite_ref-111">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLopatto2024" class="citation news cs1">Lopatto, Elizabeth (3 January 2024). <a rel="nofollow" class="external text" href="https://www.theverge.com/2024/1/3/24024262/youtube-twitter-x-crypto-solana-deepfake-scam">"Fun new deepfake consequence: more convincing crypto scams"</a>. <i><a href="/wiki/The_Verge" title="The Verge">The Verge</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171010/https://www.theverge.com/2024/1/3/24024262/youtube-twitter-x-crypto-solana-deepfake-scam">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Verge&amp;rft.atitle=Fun+new+deepfake+consequence%3A+more+convincing+crypto+scams&amp;rft.date=2024-01-03&amp;rft.aulast=Lopatto&amp;rft.aufirst=Elizabeth&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2024%2F1%2F3%2F24024262%2Fyoutube-twitter-x-crypto-solana-deepfake-scam&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-112"><span class="mw-cite-backlink"><b><a href="#cite_ref-112">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSpotoPoritz2023" class="citation news cs1">Spoto, Maia; Poritz, Isaiah (11 October 2023). <a rel="nofollow" class="external text" href="https://news.bloomberglaw.com/litigation/mrbeast-tom-hanks-stung-by-ai-scams-as-law-rushes-to-keep-pace">"MrBeast, Tom Hanks Stung by AI Scams as Law Rushes to Keep Pace"</a>. <i><a href="/wiki/Bloomberg_Law" title="Bloomberg Law">Bloomberg Law</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240110171041/https://news.bloomberglaw.com/litigation/mrbeast-tom-hanks-stung-by-ai-scams-as-law-rushes-to-keep-pace">Archived</a> from the original on 10 January 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Bloomberg+Law&amp;rft.atitle=MrBeast%2C+Tom+Hanks+Stung+by+AI+Scams+as+Law+Rushes+to+Keep+Pace&amp;rft.date=2023-10-11&amp;rft.aulast=Spoto&amp;rft.aufirst=Maia&amp;rft.au=Poritz%2C+Isaiah&amp;rft_id=https%3A%2F%2Fnews.bloomberglaw.com%2Flitigation%2Fmrbeast-tom-hanks-stung-by-ai-scams-as-law-rushes-to-keep-pace&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Statt-2019-113"><span class="mw-cite-backlink"><b><a href="#cite_ref-Statt-2019_113-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFStatt2019" class="citation news cs1">Statt, Nick (5 September 2019). <a rel="nofollow" class="external text" href="https://www.theverge.com/2019/9/5/20851248/deepfakes-ai-fake-audio-phone-calls-thieves-trick-companies-stealing-money">"Thieves are now using AI deepfakes to trick companies into sending them money"</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190915151504/https://www.theverge.com/2019/9/5/20851248/deepfakes-ai-fake-audio-phone-calls-thieves-trick-companies-stealing-money">Archived</a> from the original on 15 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">13 September</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.atitle=Thieves+are+now+using+AI+deepfakes+to+trick+companies+into+sending+them+money&amp;rft.date=2019-09-05&amp;rft.aulast=Statt&amp;rft.aufirst=Nick&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2019%2F9%2F5%2F20851248%2Fdeepfakes-ai-fake-audio-phone-calls-thieves-trick-companies-stealing-money&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Damiani-2019-114"><span class="mw-cite-backlink"><b><a href="#cite_ref-Damiani-2019_114-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDamiani" class="citation web cs1">Damiani, Jesse. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/jessedamiani/2019/09/03/a-voice-deepfake-was-used-to-scam-a-ceo-out-of-243000/">"A Voice Deepfake Was Used To Scam A CEO Out Of $243,000"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190914192455/https://www.forbes.com/sites/jessedamiani/2019/09/03/a-voice-deepfake-was-used-to-scam-a-ceo-out-of-243000/">Archived</a> from the original on 14 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=A+Voice+Deepfake+Was+Used+To+Scam+A+CEO+Out+Of+%24243%2C000&amp;rft.aulast=Damiani&amp;rft.aufirst=Jesse&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fjessedamiani%2F2019%2F09%2F03%2Fa-voice-deepfake-was-used-to-scam-a-ceo-out-of-243000%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-115"><span class="mw-cite-backlink"><b><a href="#cite_ref-115">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://mitsloan.mit.edu/ideas-made-to-matter/deepfakes-explained">"Deepfakes, explained"</a>. <i>MIT Sloan</i>. 5 March 2024. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240305225527/https://mitsloan.mit.edu/ideas-made-to-matter/deepfakes-explained">Archived</a> from the original on 5 March 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">6 March</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MIT+Sloan&amp;rft.atitle=Deepfakes%2C+explained&amp;rft.date=2024-03-05&amp;rft_id=https%3A%2F%2Fmitsloan.mit.edu%2Fideas-made-to-matter%2Fdeepfakes-explained&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-116"><span class="mw-cite-backlink"><b><a href="#cite_ref-116">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSchwartzWright2023" class="citation web cs1">Schwartz, Christopher; Wright, Matthew (17 March 2023). 
<a rel="nofollow" class="external text" href="https://theconversation.com/voice-deepfakes-are-calling-heres-what-they-are-and-how-to-avoid-getting-scammed-201449">"Voice deepfakes are calling – here's what they are and how to avoid getting scammed"</a>. <i>The Conversation</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240104120519/https://theconversation.com/voice-deepfakes-are-calling-heres-what-they-are-and-how-to-avoid-getting-scammed-201449">Archived</a> from the original on 4 January 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">4 January</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Conversation&amp;rft.atitle=Voice+deepfakes+are+calling+%E2%80%93+here%27s+what+they+are+and+how+to+avoid+getting+scammed&amp;rft.date=2023-03-17&amp;rft.aulast=Schwartz&amp;rft.aufirst=Christopher&amp;rft.au=Wright%2C+Matthew&amp;rft_id=https%3A%2F%2Ftheconversation.com%2Fvoice-deepfakes-are-calling-heres-what-they-are-and-how-to-avoid-getting-scammed-201449&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Deepfakes_explained-117"><span class="mw-cite-backlink">^ <a href="#cite_ref-Deepfakes_explained_117-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Deepfakes_explained_117-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Deepfakes_explained_117-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-Deepfakes_explained_117-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSomers2020" class="citation web cs1">Somers, Meredith (21 July 2020). <a rel="nofollow" class="external text" href="https://mitsloan.mit.edu/ideas-made-to-matter/deepfakes-explained">"Deepfakes, explained"</a>. <i>MIT Sloan</i>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20240305225527/https://mitsloan.mit.edu/ideas-made-to-matter/deepfakes-explained">Archived</a> from the original on 5 March 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">6 March</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MIT+Sloan&amp;rft.atitle=Deepfakes%2C+explained&amp;rft.date=2020-07-21&amp;rft.aulast=Somers&amp;rft.aufirst=Meredith&amp;rft_id=https%3A%2F%2Fmitsloan.mit.edu%2Fideas-made-to-matter%2Fdeepfakes-explained&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-118"><span class="mw-cite-backlink"><b><a href="#cite_ref-118">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFC2020" class="citation web cs1">C, Kim (22 August 2020). <a rel="nofollow" class="external text" href="https://www.musictimes.com/articles/82157/20200822/coffin-dance-and-more-the-music-memes-of-2020-so-far.htm">"Coffin Dance and More: The Music Memes of 2020 So Far"</a>. <i>Music Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210626152114/https://www.musictimes.com/articles/82157/20200822/coffin-dance-and-more-the-music-memes-of-2020-so-far.htm">Archived</a> from the original on 26 June 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">26 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Music+Times&amp;rft.atitle=Coffin+Dance+and+More%3A+The+Music+Memes+of+2020+So+Far&amp;rft.date=2020-08-22&amp;rft.aulast=C&amp;rft.aufirst=Kim&amp;rft_id=https%3A%2F%2Fwww.musictimes.com%2Farticles%2F82157%2F20200822%2Fcoffin-dance-and-more-the-music-memes-of-2020-so-far.htm&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-119"><span class="mw-cite-backlink"><b><a href="#cite_ref-119">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSholihyn2020" class="citation news cs1">Sholihyn, Ilyas (7 August 2020). <a rel="nofollow" class="external text" href="https://www.asiaone.com/digital/someone-deepfaked-singapores-politicians-lip-sync-japanese-meme-song">"Someone deepfaked Singapore's politicians to lip-sync that Japanese meme song"</a>. <a href="/wiki/AsiaOne" title="AsiaOne">AsiaOne</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200903160218/https://www.asiaone.com/digital/someone-deepfaked-singapores-politicians-lip-sync-japanese-meme-song">Archived</a> from the original on 3 September 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">26 August</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.atitle=Someone+deepfaked+Singapore%27s+politicians+to+lip-sync+that+Japanese+meme+song&amp;rft.date=2020-08-07&amp;rft.aulast=Sholihyn&amp;rft.aufirst=Ilyas&amp;rft_id=https%3A%2F%2Fwww.asiaone.com%2Fdigital%2Fsomeone-deepfaked-singapores-politicians-lip-sync-japanese-meme-song&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Bezmalinovic-2018-120"><span class="mw-cite-backlink">^ <a href="#cite_ref-Bezmalinovic-2018_120-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Bezmalinovic-2018_120-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Bezmalinovic-2018_120-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-Bezmalinovic-2018_120-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.aargauerzeitung.ch/leben/digital/wenn-merkel-ploetzlich-trumps-gesicht-traegt-die-gefaehrliche-manipulation-von-bildern-und-videos-132155720">"Wenn Merkel plötzlich Trumps Gesicht trägt: die gefährliche Manipulation von Bildern und Videos"</a>. az Aargauer Zeitung. 3 February 2018. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190413014251/https://www.aargauerzeitung.ch/leben/digital/wenn-merkel-ploetzlich-trumps-gesicht-traegt-die-gefaehrliche-manipulation-von-bildern-und-videos-132155720">Archived</a> from the original on 13 April 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 April</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Wenn+Merkel+pl%C3%B6tzlich+Trumps+Gesicht+tr%C3%A4gt%3A+die+gef%C3%A4hrliche+Manipulation+von+Bildern+und+Videos&amp;rft.pub=az+Aargauer+Zeitung&amp;rft.date=2018-02-03&amp;rft_id=https%3A%2F%2Fwww.aargauerzeitung.ch%2Fleben%2Fdigital%2Fwenn-merkel-ploetzlich-trumps-gesicht-traegt-die-gefaehrliche-manipulation-von-bildern-und-videos-132155720&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-121"><span class="mw-cite-backlink"><b><a href="#cite_ref-121">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGensing" class="citation web cs1">Gensing, Patrick. <a rel="nofollow" class="external text" href="http://faktenfinder.tagesschau.de/hintergrund/deep-fakes-101.html">"Deepfakes: Auf dem Weg in eine alternative Realität?"</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20181011182211/http://faktenfinder.tagesschau.de/hintergrund/deep-fakes-101.html">Archived</a> from the original on 11 October 2018<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 April</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Deepfakes%3A+Auf+dem+Weg+in+eine+alternative+Realit%C3%A4t%3F&amp;rft.aulast=Gensing&amp;rft.aufirst=Patrick&amp;rft_id=http%3A%2F%2Ffaktenfinder.tagesschau.de%2Fhintergrund%2Fdeep-fakes-101.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-122"><span class="mw-cite-backlink"><b><a href="#cite_ref-122">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRomano2018" class="citation web cs1">Romano, Aja (18 April 2018). <a rel="nofollow" class="external text" href="https://www.vox.com/2018/4/18/17252410/jordan-peele-obama-deepfake-buzzfeed">"Jordan Peele's simulated Obama PSA is a double-edged warning against fake news"</a>. <i>Vox</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190611142158/https://www.vox.com/2018/4/18/17252410/jordan-peele-obama-deepfake-buzzfeed">Archived</a> from the original on 11 June 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 September</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vox&amp;rft.atitle=Jordan+Peele%27s+simulated+Obama+PSA+is+a+double-edged+warning+against+fake+news&amp;rft.date=2018-04-18&amp;rft.aulast=Romano&amp;rft.aufirst=Aja&amp;rft_id=https%3A%2F%2Fwww.vox.com%2F2018%2F4%2F18%2F17252410%2Fjordan-peele-obama-deepfake-buzzfeed&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-123"><span class="mw-cite-backlink"><b><a href="#cite_ref-123">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSwenson2019" class="citation news cs1">Swenson, Kyle (11 January 2019). <a rel="nofollow" class="external text" href="https://www.washingtonpost.com/nation/2019/01/11/seattle-tv-station-aired-doctored-footage-trumps-oval-office-speech-employee-has-been-fired/">"A Seattle TV station aired doctored footage of Trump's Oval Office speech. The employee has been fired"</a>. <i>The Washington Post</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190415011409/https://www.washingtonpost.com/nation/2019/01/11/seattle-tv-station-aired-doctored-footage-trumps-oval-office-speech-employee-has-been-fired/">Archived</a> from the original on 15 April 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">11 January</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Washington+Post&amp;rft.atitle=A+Seattle+TV+station+aired+doctored+footage+of+Trump%27s+Oval+Office+speech.+The+employee+has+been+fired.&amp;rft.date=2019-01-11&amp;rft.aulast=Swenson&amp;rft.aufirst=Kyle&amp;rft_id=https%3A%2F%2Fwww.washingtonpost.com%2Fnation%2F2019%2F01%2F11%2Fseattle-tv-station-aired-doctored-footage-trumps-oval-office-speech-employee-has-been-fired%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-124"><span class="mw-cite-backlink"><b><a href="#cite_ref-124">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFO&#39;Sullivan2019" class="citation web cs1">O'Sullivan, Donie (4 June 2019). <a rel="nofollow" class="external text" href="https://www.cnn.com/2019/06/04/politics/house-intelligence-committee-deepfakes-threats-hearing/index.html">"Congress to investigate deepfakes as doctored Pelosi video causes stir"</a>. <i>CNN</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190629081003/https://www.cnn.com/2019/06/04/politics/house-intelligence-committee-deepfakes-threats-hearing/index.html">Archived</a> from the original on 29 June 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=CNN&amp;rft.atitle=Congress+to+investigate+deepfakes+as+doctored+Pelosi+video+causes+stir&amp;rft.date=2019-06-04&amp;rft.aulast=O%27Sullivan&amp;rft.aufirst=Donie&amp;rft_id=https%3A%2F%2Fwww.cnn.com%2F2019%2F06%2F04%2Fpolitics%2Fhouse-intelligence-committee-deepfakes-threats-hearing%2Findex.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-125"><span class="mw-cite-backlink"><b><a href="#cite_ref-125">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.extinctionrebellion.be/en/">"#TellTheTruthBelgium"</a>. <i>Extinction Rebellion Belgium</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200425000040/https://www.extinctionrebellion.be/en">Archived</a> from the original on 25 April 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">21 April</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Extinction+Rebellion+Belgium&amp;rft.atitle=%23TellTheTruthBelgium&amp;rft_id=https%3A%2F%2Fwww.extinctionrebellion.be%2Fen%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-126"><span class="mw-cite-backlink"><b><a href="#cite_ref-126">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHolubowicz2020" class="citation web cs1 cs1-prop-foreign-lang-source">Holubowicz, Gerald (15 April 2020). 
<a rel="nofollow" class="external text" href="https://journalism.design/les-deepfakes/extinction-rebellion-sempare-des-deepfakes/">"Extinction Rebellion s'empare des deepfakes"</a>. <i>Journalism.design</i> (in French). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200729050900/https://journalism.design/les-deepfakes/extinction-rebellion-sempare-des-deepfakes/">Archived</a> from the original on 29 July 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">21 April</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Journalism.design&amp;rft.atitle=Extinction+Rebellion+s%27empare+des+deepfakes&amp;rft.date=2020-04-15&amp;rft.aulast=Holubowicz&amp;rft.aufirst=Gerald&amp;rft_id=https%3A%2F%2Fjournalism.design%2Fles-deepfakes%2Fextinction-rebellion-sempare-des-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Carnahan_2020-127"><span class="mw-cite-backlink"><b><a href="#cite_ref-Carnahan_2020_127-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCarnahan2020" class="citation web cs1">Carnahan, Dustin (16 September 2020). <a rel="nofollow" class="external text" href="https://theconversation.com/faked-videos-shore-up-false-beliefs-about-bidens-mental-health-145975">"Faked videos shore up false beliefs about Biden's mental health"</a>. <i>The Conversation</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220409163145/https://theconversation.com/faked-videos-shore-up-false-beliefs-about-bidens-mental-health-145975">Archived</a> from the original on 9 April 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 April</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Conversation&amp;rft.atitle=Faked+videos+shore+up+false+beliefs+about+Biden%27s+mental+health&amp;rft.date=2020-09-16&amp;rft.aulast=Carnahan&amp;rft.aufirst=Dustin&amp;rft_id=http%3A%2F%2Ftheconversation.com%2Ffaked-videos-shore-up-false-beliefs-about-bidens-mental-health-145975&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Parker_2020-128"><span class="mw-cite-backlink"><b><a href="#cite_ref-Parker_2020_128-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFParker2020" class="citation web cs1">Parker, Ashley (7 September 2020). <span class="id-lock-registration" title="Free registration required"><a rel="nofollow" class="external text" href="https://www.independent.co.uk/news/world/americas/us-election-2020/trump-us-election-fake-news-biden-twitter-deep-fake-videos-b404815.html">"Trump and allies ramp up efforts to spread disinformation and fake news"</a></span>. <i>The Independent</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 April</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Independent&amp;rft.atitle=Trump+and+allies+ramp+up+efforts+to+spread+disinformation+and+fake+news&amp;rft.date=2020-09-07&amp;rft.aulast=Parker&amp;rft.aufirst=Ashley&amp;rft_id=https%3A%2F%2Fwww.independent.co.uk%2Fnews%2Fworld%2Famericas%2Fus-election-2020%2Ftrump-us-election-fake-news-biden-twitter-deep-fake-videos-b404815.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-129"><span class="mw-cite-backlink"><b><a href="#cite_ref-129">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFChristopher2020" class="citation web cs1">Christopher, Nilesh (18 February 2020). <a rel="nofollow" class="external text" href="https://www.vice.com/en_in/article/jgedjb/the-first-use-of-deepfakes-in-indian-election-by-bjp">"We've Just Seen the First Use of Deepfakes in an Indian Election Campaign"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200219153000/https://www.vice.com/en_in/article/jgedjb/the-first-use-of-deepfakes-in-indian-election-by-bjp">Archived</a> from the original on 19 February 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">19 February</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=We%27ve+Just+Seen+the+First+Use+of+Deepfakes+in+an+Indian+Election+Campaign&amp;rft.date=2020-02-18&amp;rft.aulast=Christopher&amp;rft.aufirst=Nilesh&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_in%2Farticle%2Fjgedjb%2Fthe-first-use-of-deepfakes-in-indian-election-by-bjp&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-130"><span class="mw-cite-backlink"><b><a href="#cite_ref-130">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.economist.com/1843/2020/04/28/amabie-the-mythical-creature-making-a-coronavirus-comeback">"Amabie: the mythical creature making a coronavirus comeback"</a>. <i>The Economist</i>. 28 April 2020. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0013-0613">0013-0613</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210520200202/https://www.economist.com/1843/2020/04/28/amabie-the-mythical-creature-making-a-coronavirus-comeback">Archived</a> from the original on 20 May 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 June</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Economist&amp;rft.atitle=Amabie%3A+the+mythical+creature+making+a+coronavirus+comeback&amp;rft.date=2020-04-28&amp;rft.issn=0013-0613&amp;rft_id=https%3A%2F%2Fwww.economist.com%2F1843%2F2020%2F04%2F28%2Famabie-the-mythical-creature-making-a-coronavirus-comeback&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-131"><span class="mw-cite-backlink"><b><a href="#cite_ref-131">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRoth2021" class="citation news cs1">Roth, Andrew (22 April 2021). <a rel="nofollow" class="external text" href="https://www.theguardian.com/world/2021/apr/22/european-mps-targeted-by-deepfake-video-calls-imitating-russian-opposition">"European MPs targeted by deepfake video calls imitating Russian opposition"</a>. <i><a href="/wiki/The_Guardian" title="The Guardian">The Guardian</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220329113114/https://www.theguardian.com/world/2021/apr/22/european-mps-targeted-by-deepfake-video-calls-imitating-russian-opposition">Archived</a> from the original on 29 March 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=European+MPs+targeted+by+deepfake+video+calls+imitating+Russian+opposition&amp;rft.date=2021-04-22&amp;rft.aulast=Roth&amp;rft.aufirst=Andrew&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Fworld%2F2021%2Fapr%2F22%2Feuropean-mps-targeted-by-deepfake-video-calls-imitating-russian-opposition&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-132"><span class="mw-cite-backlink"><b><a href="#cite_ref-132">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFIvanovRothrock2021" class="citation news cs1">Ivanov, Maxim; Rothrock, Kevin (22 April 2021). <a rel="nofollow" class="external text" href="https://meduza.io/en/feature/2021/04/22/hello-this-is-leonid-volkov">"Hello, this is Leonid Volkov* Using deepfake video and posing as Navalny's right-hand man, Russian pranksters fool Latvian politicians and journalists into invitation and TV interview"</a>. <i><a href="/wiki/Meduza" title="Meduza">Meduza</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220329113114/https://meduza.io/en/feature/2021/04/22/hello-this-is-leonid-volkov">Archived</a> from the original on 29 March 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Meduza&amp;rft.atitle=Hello%2C+this+is+Leonid+Volkov%2A+Using+deepfake+video+and+posing+as+Navalny%27s+right-hand+man%2C+Russian+pranksters+fool+Latvian+politicians+and+journalists+into+invitation+and+TV+interview&amp;rft.date=2021-04-22&amp;rft.aulast=Ivanov&amp;rft.aufirst=Maxim&amp;rft.au=Rothrock%2C+Kevin&amp;rft_id=https%3A%2F%2Fmeduza.io%2Fen%2Ffeature%2F2021%2F04%2F22%2Fhello-this-is-leonid-volkov&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-133"><span class="mw-cite-backlink"><b><a href="#cite_ref-133">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://nltimes.nl/2021/04/24/dutch-mps-video-conference-deep-fake-imitation-navalnys-chief-staff">"Dutch MPs in video conference with deep fake imitation of Navalny's Chief of Staff"</a>. <i>nltimes.nl</i>. 24 April 2021. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610170647/https://nltimes.nl/2021/04/24/dutch-mps-video-conference-deep-fake-imitation-navalnys-chief-staff">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=nltimes.nl&amp;rft.atitle=Dutch+MPs+in+video+conference+with+deep+fake+imitation+of+Navalny%27s+Chief+of+Staff&amp;rft.date=2021-04-24&amp;rft_id=https%3A%2F%2Fnltimes.nl%2F2021%2F04%2F24%2Fdutch-mps-video-conference-deep-fake-imitation-navalnys-chief-staff&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-134"><span class="mw-cite-backlink"><b><a href="#cite_ref-134">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.themoscowtimes.com/2021/04/23/deepfake-navalny-aide-targets-european-lawmakers-a73717">"<span class="cs1-kern-left"></span>'Deepfake' Navalny Aide Targets European Lawmakers"</a>. <i><a href="/wiki/The_Moscow_Times" title="The Moscow Times">The Moscow Times</a></i>. 23 April 2021. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220329152654/https://www.themoscowtimes.com/2021/04/23/deepfake-navalny-aide-targets-european-lawmakers-a73717">Archived</a> from the original on 29 March 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Moscow+Times&amp;rft.atitle=%27Deepfake%27+Navalny+Aide+Targets+European+Lawmakers&amp;rft.date=2021-04-23&amp;rft_id=https%3A%2F%2Fwww.themoscowtimes.com%2F2021%2F04%2F23%2Fdeepfake-navalny-aide-targets-european-lawmakers-a73717&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-135"><span class="mw-cite-backlink"><b><a href="#cite_ref-135">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFVincent2021" class="citation news cs1">Vincent, James (30 April 2021). <a rel="nofollow" class="external text" href="https://www.theverge.com/2021/4/30/22407264/deepfake-european-polticians-leonid-volkov-vovan-lexus">"<span class="cs1-kern-left"></span>'Deepfake' that supposedly fooled European politicians was just a look-alike, say pranksters"</a>. <i><a href="/wiki/The_Verge" title="The Verge">The Verge</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220329113114/https://www.theverge.com/2021/4/30/22407264/deepfake-european-polticians-leonid-volkov-vovan-lexus">Archived</a> from the original on 29 March 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Verge&amp;rft.atitle=%27Deepfake%27+that+supposedly+fooled+European+politicians+was+just+a+look-alike%2C+say+pranksters&amp;rft.date=2021-04-30&amp;rft.aulast=Vincent&amp;rft.aufirst=James&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2021%2F4%2F30%2F22407264%2Fdeepfake-european-polticians-leonid-volkov-vovan-lexus&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-136"><span class="mw-cite-backlink"><b><a href="#cite_ref-136">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNovak2023" class="citation news cs1">Novak, Matt (8 May 2023). <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/mattnovak/2023/05/08/viral-video-of-kamala-harris-speaking-gibberish-is-deepfake/?sh=723384a270f7">"Viral Video Of Kamala Harris Speaking Gibberish Is Actually A Deepfake"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20230718185437/https://www.forbes.com/sites/mattnovak/2023/05/08/viral-video-of-kamala-harris-speaking-gibberish-is-deepfake/?sh=723384a270f7">Archived</a> from the original on 18 July 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Forbes&amp;rft.atitle=Viral+Video+Of+Kamala+Harris+Speaking+Gibberish+Is+Actually+A+Deepfake&amp;rft.date=2023-05-08&amp;rft.aulast=Novak&amp;rft.aufirst=Matt&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fmattnovak%2F2023%2F05%2F08%2Fviral-video-of-kamala-harris-speaking-gibberish-is-deepfake%2F%3Fsh%3D723384a270f7&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-137"><span class="mw-cite-backlink"><b><a href="#cite_ref-137">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.politifact.com/factchecks/2023/may/05/facebook-posts/kamala-harris-wasnt-slurring-about-today-yesterday/">"PolitiFact - Kamala Harris wasn't slurring about today, yesterday or tomorrow. This video is altered"</a>. <i>Politifact</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172627/https://www.politifact.com/factchecks/2023/may/05/facebook-posts/kamala-harris-wasnt-slurring-about-today-yesterday/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Politifact&amp;rft.atitle=PolitiFact+-+Kamala+Harris+wasn%27t+slurring+about+today%2C+yesterday+or+tomorrow.+This+video+is+altered&amp;rft_id=https%3A%2F%2Fwww.politifact.com%2Ffactchecks%2F2023%2Fmay%2F05%2Ffacebook-posts%2Fkamala-harris-wasnt-slurring-about-today-yesterday%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-138"><span class="mw-cite-backlink"><b><a href="#cite_ref-138">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFShuham2023" class="citation web cs1">Shuham, Matt (8 June 2023). <a rel="nofollow" class="external text" href="https://www.huffpost.com/entry/desantis-trump-fauci-fake-ai-ad_n_64822436e4b025003edc3c8b">"DeSantis Campaign Ad Shows Fake AI Images Of Trump Hugging Fauci"</a>. <i>HuffPost</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171226/https://www.huffpost.com/entry/desantis-trump-fauci-fake-ai-ad_n_64822436e4b025003edc3c8b">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=HuffPost&amp;rft.atitle=DeSantis+Campaign+Ad+Shows+Fake+AI+Images+Of+Trump+Hugging+Fauci&amp;rft.date=2023-06-08&amp;rft.aulast=Shuham&amp;rft.aufirst=Matt&amp;rft_id=https%3A%2F%2Fwww.huffpost.com%2Fentry%2Fdesantis-trump-fauci-fake-ai-ad_n_64822436e4b025003edc3c8b&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-139"><span class="mw-cite-backlink"><b><a href="#cite_ref-139">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.washingtonpost.com/technology/2024/04/23/ai-deepfake-election-2024-us-india/">"AI Deepfakes Pose Major Threat to Elections in US and India"</a>. <i>The Washington Post</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0190-8286">0190-8286</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240520200202/https://www.washingtonpost.com/technology/2024/04/23/ai-deepfake-election-2024-us-india/">Archived</a> from the original on 20 May 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">22 October</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Washington+Post&amp;rft.atitle=AI+Deepfakes+Pose+Major+Threat+to+Elections+in+US+and+India&amp;rft.issn=0190-8286&amp;rft_id=https%3A%2F%2Fwww.washingtonpost.com%2Ftechnology%2F2024%2F04%2F23%2Fai-deepfake-election-2024-us-india%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-140"><span class="mw-cite-backlink"><b><a href="#cite_ref-140">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFChristopher2024" class="citation magazine cs1">Christopher, Nilesh (March 2024). <a rel="nofollow" class="external text" href="https://www.wired.com/story/indian-elections-ai-deepfakes/">"Indian Voters Are Being Bombarded With Millions of Deepfakes. Political Candidates Approve"</a>. <i>Wired</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240312123000/https://www.wired.com/story/indian-elections-ai-deepfakes/">Archived</a> from the original on 12 March 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 October</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Wired&amp;rft.atitle=Indian+Voters+Are+Being+Bombarded+With+Millions+of+Deepfakes.+Political+Candidates+Approve&amp;rft.date=2024-03&amp;rft.aulast=Christopher&amp;rft.aufirst=Nilesh&amp;rft_id=https%3A%2F%2Fwww.wired.com%2Fstory%2Findian-elections-ai-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-141"><span class="mw-cite-backlink"><b><a href="#cite_ref-141">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.bloomberg.com/features/2024-ai-election-security-deepfakes/">"What an Indian Deepfaker Tells Us About Global Election Security"</a>. <i>Bloomberg</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240401123456/https://www.bloomberg.com/features/2024-ai-election-security-deepfakes/">Archived</a> from the original on 1 April 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 October</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Bloomberg&amp;rft.atitle=What+an+Indian+Deepfaker+Tells+Us+About+Global+Election+Security&amp;rft_id=https%3A%2F%2Fwww.bloomberg.com%2Ffeatures%2F2024-ai-election-security-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Roettgers-2018-142"><span class="mw-cite-backlink">^ <a href="#cite_ref-Roettgers-2018_142-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Roettgers-2018_142-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRoettgers2018" class="citation news cs1">Roettgers, Janko (21 February 2018). <a rel="nofollow" class="external text" href="https://variety.com/2018/digital/news/deepfakes-porn-adult-industry-1202705749/">"Porn Producers Offer to Help Hollywood Take Down Deepfake Videos"</a>. <i>Variety</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190610220204/https://variety.com/2018/digital/news/deepfakes-porn-adult-industry-1202705749/">Archived</a> from the original on 10 June 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">28 February</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Variety&amp;rft.atitle=Porn+Producers+Offer+to+Help+Hollywood+Take+Down+Deepfake+Videos&amp;rft.date=2018-02-21&amp;rft.aulast=Roettgers&amp;rft.aufirst=Janko&amp;rft_id=https%3A%2F%2Fvariety.com%2F2018%2Fdigital%2Fnews%2Fdeepfakes-porn-adult-industry-1202705749%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Dickson-2019-143"><span class="mw-cite-backlink">^ <a href="#cite_ref-Dickson-2019_143-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Dickson-2019_143-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Dickson-2019_143-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDickson2019" class="citation magazine cs1">Dickson, E. J. (7 October 2019). <a rel="nofollow" class="external text" href="https://www.rollingstone.com/culture/culture-news/deepfakes-nonconsensual-porn-study-kpop-895605/">"Deepfake Porn Is Still a Threat, Particularly for K-Pop Stars"</a>. <i>Rolling Stone</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191030165258/https://www.rollingstone.com/culture/culture-news/deepfakes-nonconsensual-porn-study-kpop-895605/">Archived</a> from the original on 30 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Rolling+Stone&amp;rft.atitle=Deepfake+Porn+Is+Still+a+Threat%2C+Particularly+for+K-Pop+Stars&amp;rft.date=2019-10-07&amp;rft.aulast=Dickson&amp;rft.aufirst=E.+J.&amp;rft_id=https%3A%2F%2Fwww.rollingstone.com%2Fculture%2Fculture-news%2Fdeepfakes-nonconsensual-porn-study-kpop-895605%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Ajder-2019-144"><span class="mw-cite-backlink"><b><a href="#cite_ref-Ajder-2019_144-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://regmedia.co.uk/2019/10/08/deepfake_report.pdf">"The State of Deepfake - Landscape, Threats, and Impact"</a> <span class="cs1-format">(PDF)</span>. <i>Deeptrace</i>. 1 October 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200809043229/https://regmedia.co.uk/2019/10/08/deepfake_report.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 9 August 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 July</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Deeptrace&amp;rft.atitle=The+State+of+Deepfake+-+Landscape%2C+Threats%2C+and+Impact&amp;rft.date=2019-10-01&amp;rft_id=https%3A%2F%2Fregmedia.co.uk%2F2019%2F10%2F08%2Fdeepfake_report.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-145"><span class="mw-cite-backlink"><b><a href="#cite_ref-145">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGoggin2019" class="citation web cs1">Goggin, Benjamin (7 June 2019). <a rel="nofollow" class="external text" href="https://www.businessinsider.com/deepfakes-explained-the-rise-of-fake-realistic-videos-online-2019-6">"From porn to 'Game of Thrones': How deepfakes and realistic-looking fake videos hit it big"</a>. <i>Business Insider</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191108193123/https://www.businessinsider.com/deepfakes-explained-the-rise-of-fake-realistic-videos-online-2019-6">Archived</a> from the original on 8 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Business+Insider&amp;rft.atitle=From+porn+to+%27Game+of+Thrones%27%3A+How+deepfakes+and+realistic-looking+fake+videos+hit+it+big&amp;rft.date=2019-06-07&amp;rft.aulast=Goggin&amp;rft.aufirst=Benjamin&amp;rft_id=https%3A%2F%2Fwww.businessinsider.com%2Fdeepfakes-explained-the-rise-of-fake-realistic-videos-online-2019-6&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-146"><span class="mw-cite-backlink"><b><a href="#cite_ref-146">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLee2018" class="citation news cs1">Lee, Dave (3 February 2018). <a rel="nofollow" class="external text" href="https://www.bbc.com/news/technology-42912529">"<span class="cs1-kern-left"></span>'Fake porn' has serious consequences"</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191201134131/https://www.bbc.com/news/technology-42912529">Archived</a> from the original on 1 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.atitle=%27Fake+porn%27+has+serious+consequences&amp;rft.date=2018-02-03&amp;rft.aulast=Lee&amp;rft.aufirst=Dave&amp;rft_id=https%3A%2F%2Fwww.bbc.com%2Fnews%2Ftechnology-42912529&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2018d-147"><span class="mw-cite-backlink"><b><a href="#cite_ref-Cole-2018d_147-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2018" class="citation web cs1">Cole, Samantha (19 June 2018). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/ywe4qw/gfycat-spotting-deepfakes-fake-ai-porn">"Gfycat's AI Solution for Fighting Deepfakes Isn't Working"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191108193129/https://www.vice.com/en_us/article/ywe4qw/gfycat-spotting-deepfakes-fake-ai-porn">Archived</a> from the original on 8 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=Gfycat%27s+AI+Solution+for+Fighting+Deepfakes+Isn%27t+Working&amp;rft.date=2018-06-19&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fywe4qw%2Fgfycat-spotting-deepfakes-fake-ai-porn&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-148"><span class="mw-cite-backlink"><b><a href="#cite_ref-148">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFZoe2019" class="citation web cs1">Zoe, Freni (24 November 2019). <a rel="nofollow" class="external text" href="https://medium.com/@frenizoe/deepfake-porn-efb80f39bae3">"Deepfake Porn Is Here To Stay"</a>. <i>Medium</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191210192315/https://medium.com/@frenizoe/deepfake-porn-efb80f39bae3">Archived</a> from the original on 10 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 December</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Medium&amp;rft.atitle=Deepfake+Porn+Is+Here+To+Stay&amp;rft.date=2019-11-24&amp;rft.aulast=Zoe&amp;rft.aufirst=Freni&amp;rft_id=https%3A%2F%2Fmedium.com%2F%40frenizoe%2Fdeepfake-porn-efb80f39bae3&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2019a-149"><span class="mw-cite-backlink"><b><a href="#cite_ref-Cole-2019a_149-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFColeMaibergKoebler2019" class="citation web cs1">Cole, Samantha; Maiberg, Emanuel; Koebler, Jason (26 June 2019). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/kzm59x/deepnude-app-creates-fake-nudes-of-any-woman">"This Horrifying App Undresses a Photo of Any Woman with a Single Click"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190702011315/https://www.vice.com/en_us/article/kzm59x/deepnude-app-creates-fake-nudes-of-any-woman">Archived</a> from the original on 2 July 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">2 July</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=This+Horrifying+App+Undresses+a+Photo+of+Any+Woman+with+a+Single+Click&amp;rft.date=2019-06-26&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft.au=Maiberg%2C+Emanuel&amp;rft.au=Koebler%2C+Jason&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fkzm59x%2Fdeepnude-app-creates-fake-nudes-of-any-woman&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-150"><span class="mw-cite-backlink"><b><a href="#cite_ref-150">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCox2019" class="citation news cs1">Cox, Joseph (9 July 2019). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/8xzjpk/github-removed-open-source-versions-of-deepnude-app-deepfakes">"GitHub Removed Open Source Versions of DeepNude"</a>. <a href="/wiki/Vice_Media" title="Vice Media">Vice Media</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200924083833/https://www.vice.com/en_us/article/8xzjpk/github-removed-open-source-versions-of-deepnude-app-deepfakes">Archived</a> from the original on 24 September 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 July</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.atitle=GitHub+Removed+Open+Source+Versions+of+DeepNude&amp;rft.date=2019-07-09&amp;rft.aulast=Cox&amp;rft.aufirst=Joseph&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2F8xzjpk%2Fgithub-removed-open-source-versions-of-deepnude-app-deepfakes&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-151"><span class="mw-cite-backlink"><b><a href="#cite_ref-151">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://twitter.com/deepnudeapp/status/1144307316231200768">"pic.twitter.com/8uJKBQTZ0o"</a>. 27 June 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210406183900/https://twitter.com/deepnudeapp/status/1144307316231200768">Archived</a> from the original on 6 April 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 August</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=pic.twitter.com%2F8uJKBQTZ0o&amp;rft.date=2019-06-27&amp;rft_id=https%3A%2F%2Ftwitter.com%2Fdeepnudeapp%2Fstatus%2F1144307316231200768&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-152"><span class="mw-cite-backlink"><b><a href="#cite_ref-152">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.nbcnews.com/tech/social-media/emma-watson-deep-fake-scarlett-johansson-face-swap-app-rcna73624">"Hundreds of sexual deepfake ads using Emma Watson's face ran on Facebook and Instagram in the last two days"</a>. <i>NBC News</i>. 7 March 2023. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240229175232/https://www.nbcnews.com/tech/social-media/emma-watson-deep-fake-scarlett-johansson-face-swap-app-rcna73624">Archived</a> from the original on 29 February 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 March</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=NBC+News&amp;rft.atitle=Hundreds+of+sexual+deepfake+ads+using+Emma+Watson%27s+face+ran+on+Facebook+and+Instagram+in+the+last+two+days&amp;rft.date=2023-03-07&amp;rft_id=https%3A%2F%2Fwww.nbcnews.com%2Ftech%2Fsocial-media%2Femma-watson-deep-fake-scarlett-johansson-face-swap-app-rcna73624&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-153"><span class="mw-cite-backlink"><b><a href="#cite_ref-153">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFilipovic2024" class="citation news cs1">Filipovic, Jill (31 January 2024). <a rel="nofollow" class="external text" href="https://www.theguardian.com/commentisfree/2024/jan/31/taylor-swift-ai-pictures-far-right">"Anyone could be a victim of 'deepfakes'. But there's a reason Taylor Swift is a target"</a>. <i>The Guardian</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0261-3077">0261-3077</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171111/https://www.theguardian.com/commentisfree/2024/jan/31/taylor-swift-ai-pictures-far-right">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 March</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=Anyone+could+be+a+victim+of+%27deepfakes%27.+But+there%27s+a+reason+Taylor+Swift+is+a+target&amp;rft.date=2024-01-31&amp;rft.issn=0261-3077&amp;rft.aulast=Filipovic&amp;rft.aufirst=Jill&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Fcommentisfree%2F2024%2Fjan%2F31%2Ftaylor-swift-ai-pictures-far-right&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-154"><span class="mw-cite-backlink"><b><a href="#cite_ref-154">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFParis2021" class="citation journal cs1">Paris, Britt (October 2021). <a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F20563051211062919">"Configuring Fakes: Digitized Bodies, the Politics of Evidence, and Agency"</a>. <i>Social Media + Society</i>. <b>7</b> (4). <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F20563051211062919">10.1177/20563051211062919</a></span>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2056-3051">2056-3051</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Social+Media+%2B+Society&amp;rft.atitle=Configuring+Fakes%3A+Digitized+Bodies%2C+the+Politics+of+Evidence%2C+and+Agency&amp;rft.volume=7&amp;rft.issue=4&amp;rft.date=2021-10&amp;rft_id=info%3Adoi%2F10.1177%2F20563051211062919&amp;rft.issn=2056-3051&amp;rft.aulast=Paris&amp;rft.aufirst=Britt&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1177%252F20563051211062919&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-155"><span class="mw-cite-backlink"><b><a href="#cite_ref-155">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFDamiani" class="citation web cs1">Damiani, Jesse. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/jessedamiani/2019/09/03/chinese-deepfake-app-zao-goes-viral-faces-immediate-criticism-over-user-data-and-security-policy/">"Chinese Deepfake App Zao Goes Viral, Faces Immediate Criticism Over User Data And Security Policy"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190914182816/https://www.forbes.com/sites/jessedamiani/2019/09/03/chinese-deepfake-app-zao-goes-viral-faces-immediate-criticism-over-user-data-and-security-policy/">Archived</a> from the original on 14 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=Chinese+Deepfake+App+Zao+Goes+Viral%2C+Faces+Immediate+Criticism+Over+User+Data+And+Security+Policy&amp;rft.aulast=Damiani&amp;rft.aufirst=Jesse&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fjessedamiani%2F2019%2F09%2F03%2Fchinese-deepfake-app-zao-goes-viral-faces-immediate-criticism-over-user-data-and-security-policy%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-156"><span class="mw-cite-backlink"><b><a href="#cite_ref-156">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.independent.ie/business/technology/ahead-of-irish-and-us-elections-facebook-announces-new-measures-against-deepfake-videos-38840513.html">"Ahead of Irish and US elections, Facebook announces new measures against 'deepfake' videos"</a>. <i>Independent.ie</i>. 7 January 2020. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200108144047/https://www.independent.ie/business/technology/ahead-of-irish-and-us-elections-facebook-announces-new-measures-against-deepfake-videos-38840513.html">Archived</a> from the original on 8 January 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 January</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Independent.ie&amp;rft.atitle=Ahead+of+Irish+and+US+elections%2C+Facebook+announces+new+measures+against+%27deepfake%27+videos&amp;rft.date=2020-01-07&amp;rft_id=https%3A%2F%2Fwww.independent.ie%2Fbusiness%2Ftechnology%2Fahead-of-irish-and-us-elections-facebook-announces-new-measures-against-deepfake-videos-38840513.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-157"><span class="mw-cite-backlink"><b><a href="#cite_ref-157">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.thestatesman.com/technology/science/belgian-visual-expert-chris-ume-masterminded-tom-cruises-deepfakes-1502955882.html">"How Belgian visual expert Chris Ume masterminded Tom Cruise's deepfakes"</a>. <i>The Statesman</i>. 6 March 2021. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220824072346/https://www.thestatesman.com/technology/science/belgian-visual-expert-chris-ume-masterminded-tom-cruises-deepfakes-1502955882.html">Archived</a> from the original on 24 August 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">24 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Statesman&amp;rft.atitle=How+Belgian+visual+expert+Chris+Ume+masterminded+Tom+Cruise%27s+deepfakes&amp;rft.date=2021-03-06&amp;rft_id=https%3A%2F%2Fwww.thestatesman.com%2Ftechnology%2Fscience%2Fbelgian-visual-expert-chris-ume-masterminded-tom-cruises-deepfakes-1502955882.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-158"><span class="mw-cite-backlink"><b><a href="#cite_ref-158">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMetz" class="citation news cs1">Metz, Rachel. <a rel="nofollow" class="external text" href="https://edition.cnn.com/2021/08/06/tech/tom-cruise-deepfake-tiktok-company/index.html">"How a deepfake Tom Cruise on TikTok turned into a very real AI company"</a>. <i>CNN</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172629/https://edition.cnn.com/2021/08/06/tech/tom-cruise-deepfake-tiktok-company/index.html">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">17 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=CNN&amp;rft.atitle=How+a+deepfake+Tom+Cruise+on+TikTok+turned+into+a+very+real+AI+company&amp;rft.aulast=Metz&amp;rft.aufirst=Rachel&amp;rft_id=https%3A%2F%2Fedition.cnn.com%2F2021%2F08%2F06%2Ftech%2Ftom-cruise-deepfake-tiktok-company%2Findex.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-159"><span class="mw-cite-backlink"><b><a href="#cite_ref-159">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCorcoranHenry2021" class="citation web cs1">Corcoran, Mark; Henry, Matt (23 June 2021). <a rel="nofollow" class="external text" href="https://www.abc.net.au/news/2021-06-24/tom-cruise-deepfake-chris-ume-security-washington-dc/100234772">"This is not Tom Cruise. That's what has security experts so worried"</a>. <i>ABC News</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220328004537/https://www.abc.net.au/news/2021-06-24/tom-cruise-deepfake-chris-ume-security-washington-dc/100234772">Archived</a> from the original on 28 March 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">28 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=ABC+News&amp;rft.atitle=This+is+not+Tom+Cruise.+That%27s+what+has+security+experts+so+worried&amp;rft.date=2021-06-23&amp;rft.aulast=Corcoran&amp;rft.aufirst=Mark&amp;rft.au=Henry%2C+Matt&amp;rft_id=https%3A%2F%2Fwww.abc.net.au%2Fnews%2F2021-06-24%2Ftom-cruise-deepfake-chris-ume-security-washington-dc%2F100234772&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-160"><span class="mw-cite-backlink"><b><a href="#cite_ref-160">^</a></b></span> <span class="reference-text">Reuters, 15 July 2020, <a rel="nofollow" class="external text" href="https://www.reuters.com/article/us-cyber-deepfake-activist-idUSKCN24G15E">Deepfake Used to Attack Activist Couple Shows New Disinformation Frontier</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200926073638/https://www.reuters.com/article/us-cyber-deepfake-activist-idUSKCN24G15E">Archived</a> 26 September 2020 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a></span> </li> <li id="cite_note-161"><span class="mw-cite-backlink"><b><a href="#cite_ref-161">^</a></b></span> <span class="reference-text">972 Magazine, 12 August 2020, <a rel="nofollow" class="external text" href="https://www.972mag.com/leftists-for-bibi-deepfake-pro-netanyahu-propaganda-exposed/">"'Leftists for Bibi'? 
Deepfake Pro-Netanyahu Propaganda Exposed: According to a Series of Facebook Posts, the Israeli Prime Minister is Winning over Left-Wing Followers--Except that None of the People in Question Exist"</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200814013617/https://www.972mag.com/leftists-for-bibi-deepfake-pro-netanyahu-propaganda-exposed/">Archived</a> 14 August 2020 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a></span> </li> <li id="cite_note-162"><span class="mw-cite-backlink"><b><a href="#cite_ref-162">^</a></b></span> <span class="reference-text">The Seventh Eye, 9 June 2020, <a rel="nofollow" class="external text" href="https://www.the7eye.org.il/375768">הפורנוגרפיה של ההסתה: תומכי נתניהו ממשיכים להפיץ פוסטים מזויפים בקבוצות במדיה החברתית • לצד הטרלות מעלות גיחוך מופצות תמונות שקריות על מנת להגביר את השנאה והפילוג בחברה הישראלית</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200818080717/https://www.the7eye.org.il/375768">Archived</a> 18 August 2020 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a></span> </li> <li id="cite_note-Perfect-163"><span class="mw-cite-backlink">^ <a href="#cite_ref-Perfect_163-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Perfect_163-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.wbur.org/hereandnow/2019/10/02/deepfake-technology">"Perfect Deepfake Tech Could Arrive Sooner Than Expected"</a>. <i>www.wbur.org</i>. 2 October 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191030164611/https://www.wbur.org/hereandnow/2019/10/02/deepfake-technology">Archived</a> from the original on 30 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=www.wbur.org&amp;rft.atitle=Perfect+Deepfake+Tech+Could+Arrive+Sooner+Than+Expected&amp;rft.date=2019-10-02&amp;rft_id=https%3A%2F%2Fwww.wbur.org%2Fhereandnow%2F2019%2F10%2F02%2Fdeepfake-technology&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-164"><span class="mw-cite-backlink"><b><a href="#cite_ref-164">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSonnemaker" class="citation web cs1">Sonnemaker, Tyler. <a rel="nofollow" class="external text" href="https://www.businessinsider.com/google-ex-fraud-czar-danger-of-deepfakes-is-becoming-boring-2020-1">"As social media platforms brace for the incoming wave of deepfakes, Google's former 'fraud czar' predicts the biggest danger is that deepfakes will eventually become boring"</a>. <i>Business Insider</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210414002924/https://www.businessinsider.com/google-ex-fraud-czar-danger-of-deepfakes-is-becoming-boring-2020-1">Archived</a> from the original on 14 April 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Business+Insider&amp;rft.atitle=As+social+media+platforms+brace+for+the+incoming+wave+of+deepfakes%2C+Google%27s+former+%27fraud+czar%27+predicts+the+biggest+danger+is+that+deepfakes+will+eventually+become+boring&amp;rft.aulast=Sonnemaker&amp;rft.aufirst=Tyler&amp;rft_id=https%3A%2F%2Fwww.businessinsider.com%2Fgoogle-ex-fraud-czar-danger-of-deepfakes-is-becoming-boring-2020-1&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Vaccari-165"><span class="mw-cite-backlink"><b><a href="#cite_ref-Vaccari_165-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFVaccariChadwick2020" class="citation journal cs1">Vaccari, Cristian; Chadwick, Andrew (January 2020). <a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F2056305120903408">"Deepfakes and Disinformation: Exploring the Impact of Synthetic Political Video on Deception, Uncertainty, and Trust in News"</a>. <i>Social Media + Society</i>. <b>6</b> (1): 205630512090340. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1177%2F2056305120903408">10.1177/2056305120903408</a></span>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/2056-3051">2056-3051</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:214265502">214265502</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Social+Media+%2B+Society&amp;rft.atitle=Deepfakes+and+Disinformation%3A+Exploring+the+Impact+of+Synthetic+Political+Video+on+Deception%2C+Uncertainty%2C+and+Trust+in+News&amp;rft.volume=6&amp;rft.issue=1&amp;rft.pages=205630512090340&amp;rft.date=2020-01&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A214265502%23id-name%3DS2CID&amp;rft.issn=2056-3051&amp;rft_id=info%3Adoi%2F10.1177%2F2056305120903408&amp;rft.aulast=Vaccari&amp;rft.aufirst=Cristian&amp;rft.au=Chadwick%2C+Andrew&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1177%252F2056305120903408&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Pawelec-166"><span class="mw-cite-backlink">^ <a href="#cite_ref-Pawelec_166-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Pawelec_166-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPawelec2022" class="citation journal cs1">Pawelec, M (2022). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9453721">"Deepfakes and Democracy (Theory): How Synthetic Audio-Visual Media for Disinformation and Hate Speech Threaten Core Democratic Functions"</a>. <i>Digital Society: Ethics, Socio-legal and Governance of Digital Technology</i>. <b>1</b> (2): 19. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs44206-022-00010-6">10.1007/s44206-022-00010-6</a>. 
<a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9453721">9453721</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/36097613">36097613</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Digital+Society%3A+Ethics%2C+Socio-legal+and+Governance+of+Digital+Technology&amp;rft.atitle=Deepfakes+and+Democracy+%28Theory%29%3A+How+Synthetic+Audio-Visual+Media+for+Disinformation+and+Hate+Speech+Threaten+Core+Democratic+Functions.&amp;rft.volume=1&amp;rft.issue=2&amp;rft.pages=19&amp;rft.date=2022&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC9453721%23id-name%3DPMC&amp;rft_id=info%3Apmid%2F36097613&amp;rft_id=info%3Adoi%2F10.1007%2Fs44206-022-00010-6&amp;rft.aulast=Pawelec&amp;rft.aufirst=M&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC9453721&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Bateman-2020-167"><span class="mw-cite-backlink">^ <a href="#cite_ref-Bateman-2020_167-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Bateman-2020_167-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Bateman-2020_167-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBateman2020" class="citation journal cs1">Bateman, Jon (2020). <a rel="nofollow" class="external text" href="https://www.jstor.org/stable/resrep25783.6">"Summary"</a>. <i>Deepfakes and Synthetic Media in the Financial System</i>: 1–2. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20210420005800/https://www.jstor.org/stable/resrep25783.6">Archived</a> from the original on 20 April 2021<span class="reference-accessdate">. Retrieved <span class="nowrap">28 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Deepfakes+and+Synthetic+Media+in+the+Financial+System&amp;rft.atitle=Summary&amp;rft.pages=1-2&amp;rft.date=2020&amp;rft.aulast=Bateman&amp;rft.aufirst=Jon&amp;rft_id=https%3A%2F%2Fwww.jstor.org%2Fstable%2Fresrep25783.6&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-168"><span class="mw-cite-backlink"><b><a href="#cite_ref-168">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKelion2020" class="citation news cs1">Kelion, Leo (September 2020). <a rel="nofollow" class="external text" href="https://www.bbc.com/news/technology-53984114">"Deepfake detection tool unveiled by Microsoft"</a>. <i>BBC News</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210414182803/https://www.bbc.com/news/technology-53984114">Archived</a> from the original on 14 April 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">15 April</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=BBC+News&amp;rft.atitle=Deepfake+detection+tool+unveiled+by+Microsoft&amp;rft.date=2020-09&amp;rft.aulast=Kelion&amp;rft.aufirst=Leo&amp;rft_id=https%3A%2F%2Fwww.bbc.com%2Fnews%2Ftechnology-53984114&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-169"><span class="mw-cite-backlink"><b><a href="#cite_ref-169">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCohenRimonAflaloPermuter2022" class="citation journal cs1">Cohen, Ariel; Rimon, Inbal; Aflalo, Eran; Permuter, Haim H. (June 2022). "A study on data augmentation in voice anti-spoofing". <i>Speech Communication</i>. <b>141</b>: 56–67. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2110.10491">2110.10491</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1016%2Fj.specom.2022.04.005">10.1016/j.specom.2022.04.005</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:239050551">239050551</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Speech+Communication&amp;rft.atitle=A+study+on+data+augmentation+in+voice+anti-spoofing&amp;rft.volume=141&amp;rft.pages=56-67&amp;rft.date=2022-06&amp;rft_id=info%3Aarxiv%2F2110.10491&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A239050551%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1016%2Fj.specom.2022.04.005&amp;rft.aulast=Cohen&amp;rft.aufirst=Ariel&amp;rft.au=Rimon%2C+Inbal&amp;rft.au=Aflalo%2C+Eran&amp;rft.au=Permuter%2C+Haim+H.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Manke-2019-170"><span class="mw-cite-backlink">^ <a href="#cite_ref-Manke-2019_170-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Manke-2019_170-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Manke-2019_170-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFManke2019" class="citation web cs1">Manke, Kara (18 June 2019). <a rel="nofollow" class="external text" href="https://news.berkeley.edu/2019/06/18/researchers-use-facial-quirks-to-unmask-deepfakes/">"Researchers use facial quirks to unmask 'deepfakes'<span class="cs1-kern-right"></span>"</a>. <i>Berkeley News</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191109021126/https://news.berkeley.edu/2019/06/18/researchers-use-facial-quirks-to-unmask-deepfakes/">Archived</a> from the original on 9 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Berkeley+News&amp;rft.atitle=Researchers+use+facial+quirks+to+unmask+%27deepfakes%27&amp;rft.date=2019-06-18&amp;rft.aulast=Manke&amp;rft.aufirst=Kara&amp;rft_id=https%3A%2F%2Fnews.berkeley.edu%2F2019%2F06%2F18%2Fresearchers-use-facial-quirks-to-unmask-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Hany-171"><span class="mw-cite-backlink"><b><a href="#cite_ref-Hany_171-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFarid2006" class="citation journal cs1">Farid, Hany (1 December 2006). <a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1740-9713.2006.00197.x">"Digital Doctoring: How to Tell the Real from the Fake"</a>. <i>Significance</i>. <b>3</b> (4): 162–166. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1111%2Fj.1740-9713.2006.00197.x">10.1111/j.1740-9713.2006.00197.x</a></span>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:13861938">13861938</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Significance&amp;rft.atitle=Digital+Doctoring%3A+How+to+Tell+the+Real+from+the+Fake&amp;rft.volume=3&amp;rft.issue=4&amp;rft.pages=162-166&amp;rft.date=2006-12-01&amp;rft_id=info%3Adoi%2F10.1111%2Fj.1740-9713.2006.00197.x&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A13861938%23id-name%3DS2CID&amp;rft.aulast=Farid&amp;rft.aufirst=Hany&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1111%252Fj.1740-9713.2006.00197.x&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-DDCD-2020-172"><span class="mw-cite-backlink"><b><a href="#cite_ref-DDCD-2020_172-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://deepfakedetectionchallenge.ai/">"Join the Deepfake Detection Challenge (DFDC)"</a>. deepfakedetectionchallenge.ai. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200112102819/https://deepfakedetectionchallenge.ai/">Archived</a> from the original on 12 January 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Join+the+Deepfake+Detection+Challenge+%28DFDC%29&amp;rft.pub=deepfakedetectionchallenge.ai&amp;rft_id=https%3A%2F%2Fdeepfakedetectionchallenge.ai%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-173"><span class="mw-cite-backlink"><b><a href="#cite_ref-173">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://ai.facebook.com/blog/deepfake-detection-challenge-results-an-open-initiative-to-advance-ai/">"Deepfake Detection Challenge Results: An open initiative to advance AI"</a>. <i>ai.facebook.com</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201029023928/https://ai.facebook.com/blog/deepfake-detection-challenge-results-an-open-initiative-to-advance-ai">Archived</a> from the original on 29 October 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 September</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=ai.facebook.com&amp;rft.atitle=Deepfake+Detection+Challenge+Results%3A+An+open+initiative+to+advance+AI&amp;rft_id=https%3A%2F%2Fai.facebook.com%2Fblog%2Fdeepfake-detection-challenge-results-an-open-initiative-to-advance-ai%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-174"><span class="mw-cite-backlink"><b><a href="#cite_ref-174">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGrohEpsteinFirestonePicard2022" class="citation journal cs1">Groh, Matthew; Epstein, Ziv; Firestone, Chaz; Picard, Rosalind (2022). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8740705">"Deepfake detection by human crowds, machines, and machine-informed crowds"</a>. <i>Proceedings of the National Academy of Sciences</i>. <b>119</b> (1). <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2105.06496">2105.06496</a></span>. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2022PNAS..11910013G">2022PNAS..11910013G</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1073%2Fpnas.2110013119">10.1073/pnas.2110013119</a></span>. 
<a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8740705">8740705</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/34969837">34969837</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Proceedings+of+the+National+Academy+of+Sciences&amp;rft.atitle=Deepfake+detection+by+human+crowds%2C+machines%2C+and+machine-informed+crowds&amp;rft.volume=119&amp;rft.issue=1&amp;rft.date=2022&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC8740705%23id-name%3DPMC&amp;rft_id=info%3Abibcode%2F2022PNAS..11910013G&amp;rft_id=info%3Aarxiv%2F2105.06496&amp;rft_id=info%3Apmid%2F34969837&amp;rft_id=info%3Adoi%2F10.1073%2Fpnas.2110013119&amp;rft.aulast=Groh&amp;rft.aufirst=Matthew&amp;rft.au=Epstein%2C+Ziv&amp;rft.au=Firestone%2C+Chaz&amp;rft.au=Picard%2C+Rosalind&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC8740705&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-175"><span class="mw-cite-backlink"><b><a href="#cite_ref-175">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHuLiLyu2020" class="citation arxiv cs1">Hu, Shu; Li, Yuezun; Lyu, Siwei (12 October 2020). "Exposing GAN-Generated Faces Using Inconsistent Corneal Specular Highlights". 
<a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2009.11924">2009.11924</a></span> [<a rel="nofollow" class="external text" href="https://arxiv.org/archive/cs.CV">cs.CV</a>].</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=preprint&amp;rft.jtitle=arXiv&amp;rft.atitle=Exposing+GAN-Generated+Faces+Using+Inconsistent+Corneal+Specular+Highlights&amp;rft.date=2020-10-12&amp;rft_id=info%3Aarxiv%2F2009.11924&amp;rft.aulast=Hu&amp;rft.aufirst=Shu&amp;rft.au=Li%2C+Yuezun&amp;rft.au=Lyu%2C+Siwei&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Boháček-176"><span class="mw-cite-backlink"><b><a href="#cite_ref-Boháček_176-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBoháčekFarid2022" class="citation journal cs1">Boháček, M; Farid, H (29 November 2022). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9860138">"Protecting world leaders against deep fakes using facial, gestural, and vocal mannerisms"</a>. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. <b>119</b> (48): e2216035119. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2022PNAS..11916035B">2022PNAS..11916035B</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1073%2Fpnas.2216035119">10.1073/pnas.2216035119</a></span>. 
<a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9860138">9860138</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/36417442">36417442</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Proceedings+of+the+National+Academy+of+Sciences+of+the+United+States+of+America&amp;rft.atitle=Protecting+world+leaders+against+deep+fakes+using+facial%2C+gestural%2C+and+vocal+mannerisms.&amp;rft.volume=119&amp;rft.issue=48&amp;rft.pages=e2216035119&amp;rft.date=2022-11-29&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC9860138%23id-name%3DPMC&amp;rft_id=info%3Apmid%2F36417442&amp;rft_id=info%3Adoi%2F10.1073%2Fpnas.2216035119&amp;rft_id=info%3Abibcode%2F2022PNAS..11916035B&amp;rft.aulast=Boh%C3%A1%C4%8Dek&amp;rft.aufirst=M&amp;rft.au=Farid%2C+H&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC9860138&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Scholar-search-2022a-177"><span class="mw-cite-backlink">^ <a href="#cite_ref-Scholar-search-2022a_177-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Scholar-search-2022a_177-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://scholar.google.com/scholar?hl=en&amp;as_sdt=0,39&amp;q=Recurrent+Convolutional+Strategies+for+Face+Manipulation+Detection+in+Videos&amp;btnG=">"Google Scholar"</a>. <i>scholar.google.com</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 April</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=scholar.google.com&amp;rft.atitle=Google+Scholar&amp;rft_id=https%3A%2F%2Fscholar.google.com%2Fscholar%3Fhl%3Den%26as_sdt%3D0%2C39%26q%3DRecurrent%2BConvolutional%2BStrategies%2Bfor%2BFace%2BManipulation%2BDetection%2Bin%2BVideos%26btnG%3D&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Scholar-search-2022b-178"><span class="mw-cite-backlink">^ <a href="#cite_ref-Scholar-search-2022b_178-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Scholar-search-2022b_178-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMasiKillekarMascarenhasGurudatt2020" class="citation book cs1">Masi, Iacopo; Killekar, Aditya; Mascarenhas, Royston Marian; Gurudatt, Shenoy Pratik; Abdalmageed, Wael (2020). <a rel="nofollow" class="external text" href="https://scholar.google.com/citations?view_op=view_citation&amp;hl=en&amp;user=tRGH8FkAAAAJ&amp;citation_for_view=tRGH8FkAAAAJ:nb7KW1ujOQ8C"><i>Two-branch recurrent network for isolating deepfakes in videos</i></a>. Lecture Notes in Computer Science. Vol.&#160;12352. pp.&#160;667–684. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2008.03412">2008.03412</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-3-030-58571-6_39">10.1007/978-3-030-58571-6_39</a>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-3-030-58570-9" title="Special:BookSources/978-3-030-58570-9"><bdi>978-3-030-58570-9</bdi></a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172427/https://scholar.google.com/citations?view_op=view_citation&amp;hl=en&amp;user=tRGH8FkAAAAJ&amp;citation_for_view=tRGH8FkAAAAJ:nb7KW1ujOQ8C">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">30 April</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Two-branch+recurrent+network+for+isolating+deepfakes+in+videos&amp;rft.series=Lecture+Notes+in+Computer+Science&amp;rft.pages=667-684&amp;rft.date=2020&amp;rft_id=info%3Aarxiv%2F2008.03412&amp;rft_id=info%3Adoi%2F10.1007%2F978-3-030-58571-6_39&amp;rft.isbn=978-3-030-58570-9&amp;rft.aulast=Masi&amp;rft.aufirst=Iacopo&amp;rft.au=Killekar%2C+Aditya&amp;rft.au=Mascarenhas%2C+Royston+Marian&amp;rft.au=Gurudatt%2C+Shenoy+Pratik&amp;rft.au=Abdalmageed%2C+Wael&amp;rft_id=https%3A%2F%2Fscholar.google.com%2Fcitations%3Fview_op%3Dview_citation%26hl%3Den%26user%3DtRGH8FkAAAAJ%26citation_for_view%3DtRGH8FkAAAAJ%3Anb7KW1ujOQ8C&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Wired-2019b-179"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wired-2019b_179-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wired-2019b_179-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Wired-2019b_179-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation magazine cs1"><a rel="nofollow" class="external text" href="https://www.wired.com/story/the-blockchain-solution-to-our-deepfake-problems/">"The 
Blockchain Solution to Our Deepfake Problems"</a>. <i>Wired</i>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1059-1028">1059-1028</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191107164023/https://www.wired.com/story/the-blockchain-solution-to-our-deepfake-problems/">Archived</a> from the original on 7 November 2019<span class="reference-accessdate">. Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Wired&amp;rft.atitle=The+Blockchain+Solution+to+Our+Deepfake+Problems&amp;rft.issn=1059-1028&amp;rft_id=https%3A%2F%2Fwww.wired.com%2Fstory%2Fthe-blockchain-solution-to-our-deepfake-problems%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Leetaru-2021-180"><span class="mw-cite-backlink">^ <a href="#cite_ref-Leetaru-2021_180-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Leetaru-2021_180-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLeetaru" class="citation web cs1">Leetaru, Kalev. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/kalevleetaru/2018/09/09/why-digital-signatures-wont-prevent-deep-fakes-but-will-help-repressive-governments/">"Why Digital Signatures Won't Prevent Deep Fakes But Will Help Repressive Governments"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210414234733/https://www.forbes.com/sites/kalevleetaru/2018/09/09/why-digital-signatures-wont-prevent-deep-fakes-but-will-help-repressive-governments/">Archived</a> from the original on 14 April 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">17 February</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=Why+Digital+Signatures+Won%27t+Prevent+Deep+Fakes+But+Will+Help+Repressive+Governments&amp;rft.aulast=Leetaru&amp;rft.aufirst=Kalev&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fkalevleetaru%2F2018%2F09%2F09%2Fwhy-digital-signatures-wont-prevent-deep-fakes-but-will-help-repressive-governments%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-181"><span class="mw-cite-backlink"><b><a href="#cite_ref-181">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://web.archive.org/web/20220826235234/https://metaphysic.ai/to-uncover-a-deepfake-video-call-ask-the-caller-to-turn-sideways/">"To Uncover a Deepfake Video Call, Ask the Caller to Turn Sideways"</a>. <i>Metaphysic</i>. 8 August 2022. Archived from <a rel="nofollow" class="external text" href="https://metaphysic.ai/to-uncover-a-deepfake-video-call-ask-the-caller-to-turn-sideways/">the original</a> on 26 August 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">24 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Metaphysic&amp;rft.atitle=To+Uncover+a+Deepfake+Video+Call%2C+Ask+the+Caller+to+Turn+Sideways&amp;rft.date=2022-08-08&amp;rft_id=https%3A%2F%2Fmetaphysic.ai%2Fto-uncover-a-deepfake-video-call-ask-the-caller-to-turn-sideways%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-182"><span class="mw-cite-backlink"><b><a href="#cite_ref-182">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.msn.com/en-ae/news/featured/kate-middleton-s-ring-mysteriously-vanishes-raises-more-ai-concerns/ar-BB1ktPZJ">"Kate Middleton's ring mysteriously vanishes, raises more AI concerns"</a>. MSN. 25 March 2024. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171113/https://www.msn.com/en-ae/news/featured/kate-middleton-s-ring-mysteriously-vanishes-raises-more-ai-concerns/ar-BB1ktPZJ">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">19 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Kate+Middleton%27s+ring+mysteriously+vanishes%2C+raises+more+AI+concerns&amp;rft.pub=MSN&amp;rft.date=2024-03-25&amp;rft_id=https%3A%2F%2Fwww.msn.com%2Fen-ae%2Fnews%2Ffeatured%2Fkate-middleton-s-ring-mysteriously-vanishes-raises-more-ai-concerns%2Far-BB1ktPZJ&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-183"><span class="mw-cite-backlink"><b><a href="#cite_ref-183">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHindustan_Times2024" class="citation web cs1">Hindustan Times (5 April 2024). <a rel="nofollow" class="external text" href="https://www.hindustantimes.com/world-news/us-news/kates-cancer-admission-is-fake-meghan-markles-fan-and-ucla-director-johnathan-perkins-floats-conspiracy-theory-101712301940262.html">"<span class="cs1-kern-left"></span>'Kate's cancer admission is fake', Meghan Markle's fan and UCLA director, Johnathan Perkins, floats conspiracy theory"</a>. The Hindustan Times. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172633/https://www.hindustantimes.com/world-news/us-news/kates-cancer-admission-is-fake-meghan-markles-fan-and-ucla-director-johnathan-perkins-floats-conspiracy-theory-101712301940262.html">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">19 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=%27Kate%27s+cancer+admission+is+fake%27%2C+Meghan+Markle%27s+fan+and+UCLA+director%2C+Johnathan+Perkins%2C+floats+conspiracy+theory&amp;rft.pub=The+Hindustan+Times&amp;rft.date=2024-04-05&amp;rft.au=Hindustan+Times&amp;rft_id=https%3A%2F%2Fwww.hindustantimes.com%2Fworld-news%2Fus-news%2Fkates-cancer-admission-is-fake-meghan-markles-fan-and-ucla-director-johnathan-perkins-floats-conspiracy-theory-101712301940262.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-AP_News-184"><span class="mw-cite-backlink">^ <a href="#cite_ref-AP_News_184-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-AP_News_184-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://apnews.com/article/fact-check-trump-nypd-stormy-daniels-539393517762">"AI-generated images of Trump being arrested circulate on social media"</a>. AP News. 21 March 2023. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172633/https://apnews.com/article/fact-check-trump-nypd-stormy-daniels-539393517762">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 October</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=AI-generated+images+of+Trump+being+arrested+circulate+on+social+media&amp;rft.pub=AP+News&amp;rft.date=2023-03-21&amp;rft_id=https%3A%2F%2Fapnews.com%2Farticle%2Ffact-check-trump-nypd-stormy-daniels-539393517762&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-185"><span class="mw-cite-backlink"><b><a href="#cite_ref-185">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFagan" class="citation web cs1">Fagan, Kaylee. <a rel="nofollow" class="external text" href="https://www.businessinsider.com/obama-deepfake-video-insulting-trump-2018-4">"A viral video that appeared to show Obama calling Trump a 'dips---' shows a disturbing new trend called 'deepfakes'<span class="cs1-kern-right"></span>"</a>. <i>Business Insider</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200922174210/https://www.businessinsider.com/obama-deepfake-video-insulting-trump-2018-4">Archived</a> from the original on 22 September 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 November</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Business+Insider&amp;rft.atitle=A+viral+video+that+appeared+to+show+Obama+calling+Trump+a+%27dips---%27+shows+a+disturbing+new+trend+called+%27deepfakes%27&amp;rft.aulast=Fagan&amp;rft.aufirst=Kaylee&amp;rft_id=https%3A%2F%2Fwww.businessinsider.com%2Fobama-deepfake-video-insulting-trump-2018-4&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Parkin-2019-186"><span class="mw-cite-backlink">^ <a href="#cite_ref-Parkin-2019_186-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Parkin-2019_186-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.theguardian.com/technology/ng-interactive/2019/jun/22/the-rise-of-the-deepfake-and-the-threat-to-democracy">"The rise of the deepfake and the threat to democracy"</a>. <i>The Guardian</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201101063543/https://www.theguardian.com/technology/ng-interactive/2019/jun/22/the-rise-of-the-deepfake-and-the-threat-to-democracy">Archived</a> from the original on 1 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 November</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Guardian&amp;rft.atitle=The+rise+of+the+deepfake+and+the+threat+to+democracy&amp;rft_id=http%3A%2F%2Fwww.theguardian.com%2Ftechnology%2Fng-interactive%2F2019%2Fjun%2F22%2Fthe-rise-of-the-deepfake-and-the-threat-to-democracy&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-187"><span class="mw-cite-backlink"><b><a href="#cite_ref-187">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.independent.co.uk/news/world/americas/us-politics/donald-trump-ai-praying-photo-b2307178.html">"Trump shares deepfake photo of himself praying as AI images of arrest spread online"</a>. <i>The Independent</i>. 24 March 2023. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20230528154052/https://www.independent.co.uk/news/world/americas/us-politics/donald-trump-ai-praying-photo-b2307178.html">Archived</a> from the original on 28 May 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Independent&amp;rft.atitle=Trump+shares+deepfake+photo+of+himself+praying+as+AI+images+of+arrest+spread+online&amp;rft.date=2023-03-24&amp;rft_id=https%3A%2F%2Fwww.independent.co.uk%2Fnews%2Fworld%2Famericas%2Fus-politics%2Fdonald-trump-ai-praying-photo-b2307178.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-188"><span class="mw-cite-backlink"><b><a href="#cite_ref-188">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFTowers-Clark" class="citation web cs1">Towers-Clark, Charles. <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/charlestowersclark/2019/05/31/mona-lisa-and-nancy-pelosi-the-implications-of-deepfakes/">"Mona Lisa And Nancy Pelosi: The Implications Of Deepfakes"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201123002751/https://www.forbes.com/sites/charlestowersclark/2019/05/31/mona-lisa-and-nancy-pelosi-the-implications-of-deepfakes/">Archived</a> from the original on 23 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=Mona+Lisa+And+Nancy+Pelosi%3A+The+Implications+Of+Deepfakes&amp;rft.aulast=Towers-Clark&amp;rft.aufirst=Charles&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fcharlestowersclark%2F2019%2F05%2F31%2Fmona-lisa-and-nancy-pelosi-the-implications-of-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-189"><span class="mw-cite-backlink"><b><a href="#cite_ref-189">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://web.archive.org/web/20220626115937/https://deepfakenow.com/what-is-the-difference-between-a-deepfake-and-shallowfake/">"What Is The Difference Between A Deepfake And Shallowfake?"</a>. 21 April 2020. Archived from <a rel="nofollow" class="external text" href="https://deepfakenow.com/what-is-the-difference-between-a-deepfake-and-shallowfake/">the original</a> on 26 June 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">5 December</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=What+Is+The+Difference+Between+A+Deepfake+And+Shallowfake%3F&amp;rft.date=2020-04-21&amp;rft_id=https%3A%2F%2Fdeepfakenow.com%2Fwhat-is-the-difference-between-a-deepfake-and-shallowfake%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-190"><span class="mw-cite-backlink"><b><a href="#cite_ref-190">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://billposters.ch/spectre-launch/">"Gallery: 'Spectre' Launches ( Press Release)"</a>. <i>Bill Posters</i>. 29 May 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171117/https://billposters.ch/spectre-launch/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">15 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Bill+Posters&amp;rft.atitle=Gallery%3A+%27Spectre%27+Launches+%28+Press+Release%29&amp;rft.date=2019-05-29&amp;rft_id=https%3A%2F%2Fbillposters.ch%2Fspectre-launch%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-191"><span class="mw-cite-backlink"><b><a href="#cite_ref-191">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2019" class="citation news cs1">Cole, Samantha (11 June 2019). 
<a rel="nofollow" class="external text" href="https://www.vice.com/en/article/ywyxex/deepfake-of-mark-zuckerberg-facebook-fake-video-policy">"This Deepfake of Mark Zuckerberg Tests Facebook's Fake Video Policies"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172428/https://www.vice.com/en/article/ywyxex/deepfake-of-mark-zuckerberg-facebook-fake-video-policy">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">15 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Vice&amp;rft.atitle=This+Deepfake+of+Mark+Zuckerberg+Tests+Facebook%27s+Fake+Video+Policies&amp;rft.date=2019-06-11&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen%2Farticle%2Fywyxex%2Fdeepfake-of-mark-zuckerberg-facebook-fake-video-policy&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Hao-2020-192"><span class="mw-cite-backlink">^ <a href="#cite_ref-Hao-2020_192-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Hao-2020_192-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-Hao-2020_192-2"><sup><i><b>c</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.technologyreview.com/2020/09/29/1009098/ai-deepfake-putin-kim-jong-un-us-election/">"Deepfake Putin is here to warn Americans about their self-inflicted doom"</a>. <i>MIT Technology Review</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201030140905/https://www.technologyreview.com/2020/09/29/1009098/ai-deepfake-putin-kim-jong-un-us-election/">Archived</a> from the original on 30 October 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MIT+Technology+Review&amp;rft.atitle=Deepfake+Putin+is+here+to+warn+Americans+about+their+self-inflicted+doom&amp;rft_id=https%3A%2F%2Fwww.technologyreview.com%2F2020%2F09%2F29%2F1009098%2Fai-deepfake-putin-kim-jong-un-us-election%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-NYT1-193"><span class="mw-cite-backlink"><b><a href="#cite_ref-NYT1_193-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSonne2023" class="citation news cs1">Sonne, Paul (5 June 2023). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2023/06/05/world/europe/putin-deep-fake-speech-hackers.html">"Fake Putin Speech Calling for Martial Law Aired in Russia"</a>. <i>The New York Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172636/https://www.nytimes.com/2023/06/05/world/europe/putin-deep-fake-speech-hackers.html">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">6 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=Fake+Putin+Speech+Calling+for+Martial+Law+Aired+in+Russia&amp;rft.date=2023-06-05&amp;rft.aulast=Sonne&amp;rft.aufirst=Paul&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2023%2F06%2F05%2Fworld%2Feurope%2Fputin-deep-fake-speech-hackers.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-194"><span class="mw-cite-backlink"><b><a href="#cite_ref-194">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFAllyn,_Bobby2022" class="citation web cs1">Allyn, Bobby (16 March 2022). <a rel="nofollow" class="external text" href="https://www.npr.org/2022/03/16/1087062648/deepfake-video-zelenskyy-experts-war-manipulation-ukraine-russia">"Deepfake video of Zelenskyy could be 'tip of the iceberg' in info war, experts warn"</a>. <a href="/wiki/NPR" title="NPR">NPR</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220329190821/https://www.npr.org/2022/03/16/1087062648/deepfake-video-zelenskyy-experts-war-manipulation-ukraine-russia">Archived</a> from the original on 29 March 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">17 March</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Deepfake+video+of+Zelenskyy+could+be+%27tip+of+the+iceberg%27+in+info+war%2C+experts+warn&amp;rft.pub=NPR&amp;rft.date=2022-03-16&amp;rft.au=Allyn%2C+Bobby&amp;rft_id=https%3A%2F%2Fwww.npr.org%2F2022%2F03%2F16%2F1087062648%2Fdeepfake-video-zelenskyy-experts-war-manipulation-ukraine-russia&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-195"><span class="mw-cite-backlink"><b><a href="#cite_ref-195">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSatarianoMozur2023" class="citation web cs1">Satariano, Adam; Mozur, Paul (7 February 2023). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2023/02/07/technology/artificial-intelligence-training-deepfake.html">"The People Onscreen Are Fake. The Disinformation Is Real"</a>. <i>The New York Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173139/https://www.nytimes.com/2023/02/07/technology/artificial-intelligence-training-deepfake.html">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">10 February</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=The+People+Onscreen+Are+Fake.+The+Disinformation+Is+Real.&amp;rft.date=2023-02-07&amp;rft.aulast=Satariano&amp;rft.aufirst=Adam&amp;rft.au=Mozur%2C+Paul&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2023%2F02%2F07%2Ftechnology%2Fartificial-intelligence-training-deepfake.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-196"><span class="mw-cite-backlink"><b><a href="#cite_ref-196">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://nypost.com/2023/03/27/pope-francis-in-balenciaga-deepfake-fools-millions-definitely-scary/">"Pope Francis in Balenciaga deepfake fools millions: 'Definitely scary'<span class="cs1-kern-right"></span>"</a>. <i>New York Post</i>. 28 March 2023. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610172429/https://nypost.com/2023/03/27/pope-francis-in-balenciaga-deepfake-fools-millions-definitely-scary/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=New+York+Post&amp;rft.atitle=Pope+Francis+in+Balenciaga+deepfake+fools+millions%3A+%27Definitely+scary%27&amp;rft.date=2023-03-28&amp;rft_id=https%3A%2F%2Fnypost.com%2F2023%2F03%2F27%2Fpope-francis-in-balenciaga-deepfake-fools-millions-definitely-scary%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-197"><span class="mw-cite-backlink"><b><a href="#cite_ref-197">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLu2023" class="citation news cs1">Lu, Donna (31 March 2023). <a rel="nofollow" class="external text" href="https://www.theguardian.com/technology/2023/apr/01/misinformation-mistakes-and-the-pope-in-a-puffer-what-rapidly-evolving-ai-can-and-cant-do">"Misinformation, mistakes and the Pope in a puffer: what rapidly evolving AI can – and can't – do"</a>. <i>The Guardian</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610171215/https://www.theguardian.com/technology/2023/apr/01/misinformation-mistakes-and-the-pope-in-a-puffer-what-rapidly-evolving-ai-can-and-cant-do">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=Misinformation%2C+mistakes+and+the+Pope+in+a+puffer%3A+what+rapidly+evolving+AI+can+%E2%80%93+and+can%27t+%E2%80%93+do&amp;rft.date=2023-03-31&amp;rft.aulast=Lu&amp;rft.aufirst=Donna&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Ftechnology%2F2023%2Fapr%2F01%2Fmisinformation-mistakes-and-the-pope-in-a-puffer-what-rapidly-evolving-ai-can-and-cant-do&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-198"><span class="mw-cite-backlink"><b><a href="#cite_ref-198">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMurphy2023" class="citation news cs1">Murphy, Heather Tal (29 March 2023). <a rel="nofollow" class="external text" href="https://slate.com/technology/2023/03/pope-coat-midjourney-puffer-jacket-balenciaga-explained.html">"The Pope in a Coat Is Not From a Holy Place"</a>. <i>Slate</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173141/https://slate.com/technology/2023/03/pope-coat-midjourney-puffer-jacket-balenciaga-explained.html">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 June</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Slate&amp;rft.atitle=The+Pope+in+a+Coat+Is+Not+From+a+Holy+Place&amp;rft.date=2023-03-29&amp;rft.aulast=Murphy&amp;rft.aufirst=Heather+Tal&amp;rft_id=https%3A%2F%2Fslate.com%2Ftechnology%2F2023%2F03%2Fpope-coat-midjourney-puffer-jacket-balenciaga-explained.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-199"><span class="mw-cite-backlink"><b><a href="#cite_ref-199">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://news.sky.com/story/labour-faces-political-attack-after-deepfake-audio-is-posted-of-sir-keir-starmer-12980181">"Deepfake audio of Sir Keir Starmer released on first day of Labour conference"</a>. <i>Sky News</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Sky+News&amp;rft.atitle=Deepfake+audio+of+Sir+Keir+Starmer+released+on+first+day+of+Labour+conference&amp;rft_id=https%3A%2F%2Fnews.sky.com%2Fstory%2Flabour-faces-political-attack-after-deepfake-audio-is-posted-of-sir-keir-starmer-12980181&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-200"><span class="mw-cite-backlink"><b><a href="#cite_ref-200">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://timesofindia.indiatimes.com/entertainment/hindi/bollywood/news/woman-in-deepfake-video-with-rashmika-mandannas-face-breaks-silence-im-deeply-disturbed-and-upset-by-what-is-happening/articleshow/105047285.cms?from=mdr">"Woman in deepfake video with Rashmika Mandanna's face breaks silence: I'm deeply disturbed and upset by what is happening"</a>. <i>The Times of India</i>. 9 November 2023. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0971-8257">0971-8257</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20231123173646/https://timesofindia.indiatimes.com/entertainment/hindi/bollywood/news/woman-in-deepfake-video-with-rashmika-mandannas-face-breaks-silence-im-deeply-disturbed-and-upset-by-what-is-happening/articleshow/105047285.cms?from=mdr">Archived</a> from the original on 23 November 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">23 November</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Times+of+India&amp;rft.atitle=Woman+in+deepfake+video+with+Rashmika+Mandanna%27s+face+breaks+silence%3A+I%27m+deeply+disturbed+and+upset+by+what+is+happening&amp;rft.date=2023-11-09&amp;rft.issn=0971-8257&amp;rft_id=https%3A%2F%2Ftimesofindia.indiatimes.com%2Fentertainment%2Fhindi%2Fbollywood%2Fnews%2Fwoman-in-deepfake-video-with-rashmika-mandannas-face-breaks-silence-im-deeply-disturbed-and-upset-by-what-is-happening%2Farticleshow%2F105047285.cms%3Ffrom%3Dmdr&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-201"><span class="mw-cite-backlink"><b><a href="#cite_ref-201">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCupin2024" class="citation news cs1">Cupin, Bea (24 April 2024). <a rel="nofollow" class="external text" href="https://www.rappler.com/philippines/malacanang-flags-deepfake-audio-marcos-ordering-military-attack-april-2024/">"Malacañang flags deepfake audio of Marcos ordering military attack"</a>. <i>Rappler</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173148/https://www.rappler.com/philippines/malacanang-flags-deepfake-audio-marcos-ordering-military-attack-april-2024/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Rappler&amp;rft.atitle=Malaca%C3%B1ang+flags+deepfake+audio+of+Marcos+ordering+military+attack&amp;rft.date=2024-04-24&amp;rft.aulast=Cupin&amp;rft.aufirst=Bea&amp;rft_id=https%3A%2F%2Fwww.rappler.com%2Fphilippines%2Fmalacanang-flags-deepfake-audio-marcos-ordering-military-attack-april-2024%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-202"><span class="mw-cite-backlink"><b><a href="#cite_ref-202">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFlores2024" class="citation news cs1">Flores, Helen (27 April 2024). <a rel="nofollow" class="external text" href="https://www.philstar.com/headlines/2024/04/27/2350826/foreign-actor-seen-behind-president-marcos-audio-deepfake">"<span class="cs1-kern-left"></span>'Foreign actor' seen behind President Marcos audio deepfake"</a>. <i>The Philippines Star</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173146/https://www.philstar.com/headlines/2024/04/27/2350826/foreign-actor-seen-behind-president-marcos-audio-deepfake">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Philippines+Star&amp;rft.atitle=%27Foreign+actor%27+seen+behind+President+Marcos+audio+deepfake&amp;rft.date=2024-04-27&amp;rft.aulast=Flores&amp;rft.aufirst=Helen&amp;rft_id=https%3A%2F%2Fwww.philstar.com%2Fheadlines%2F2024%2F04%2F27%2F2350826%2Fforeign-actor-seen-behind-president-marcos-audio-deepfake&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-203"><span class="mw-cite-backlink"><b><a href="#cite_ref-203">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFArgosino2024" class="citation news cs1">Argosino, Faith (14 May 2024). <a rel="nofollow" class="external text" href="https://newsinfo.inquirer.net/1940406/raps-filed-vs-social-media-pages-for-libelous-content-marcos-deepfake">"Raps filed vs social media pages for libelous content, Marcos deepfake"</a>. <i>Philippine Daily Inquirer</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173145/https://newsinfo.inquirer.net/1940406/raps-filed-vs-social-media-pages-for-libelous-content-marcos-deepfake">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 May</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Philippine+Daily+Inquirer&amp;rft.atitle=Raps+filed+vs+social+media+pages+for+libelous+content%2C+Marcos+deepfake&amp;rft.date=2024-05-14&amp;rft.aulast=Argosino&amp;rft.aufirst=Faith&amp;rft_id=https%3A%2F%2Fnewsinfo.inquirer.net%2F1940406%2Fraps-filed-vs-social-media-pages-for-libelous-content-marcos-deepfake&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-204"><span class="mw-cite-backlink"><b><a href="#cite_ref-204">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.rappler.com/philippines/face-swapped-deepfake-detector-flags-alleged-marcos-video-suspicious/">"Face-swapped? Deepfake detector flags alleged Marcos video as 'suspicious'<span class="cs1-kern-right"></span>"</a>. <i>Rappler</i>. 23 July 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">25 July</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Rappler&amp;rft.atitle=Face-swapped%3F+Deepfake+detector+flags+alleged+Marcos+video+as+%27suspicious%27&amp;rft.date=2024-07-23&amp;rft_id=https%3A%2F%2Fwww.rappler.com%2Fphilippines%2Fface-swapped-deepfake-detector-flags-alleged-marcos-video-suspicious%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-205"><span class="mw-cite-backlink"><b><a href="#cite_ref-205">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.philstar.com/nation/2024/07/24/2372498/nbi-pnp-findings-show-polvoron-video-fake">"NBI, PNP findings show 'polvoron' video fake"</a>. <i>The Philippine Star</i>. 24 July 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">25 July</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Philippine+Star&amp;rft.atitle=NBI%2C+PNP+findings+show+%27polvoron%27+video+fake&amp;rft.date=2024-07-24&amp;rft_id=https%3A%2F%2Fwww.philstar.com%2Fnation%2F2024%2F07%2F24%2F2372498%2Fnbi-pnp-findings-show-polvoron-video-fake&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-206"><span class="mw-cite-backlink"><b><a href="#cite_ref-206">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFShepardson2024" class="citation news cs1">Shepardson, David (23 May 2024). 
<a rel="nofollow" class="external text" href="https://www.reuters.com/world/us/us-political-consultant-indicted-over-ai-generated-biden-robocalls-2024-05-23/">"US political consultant indicted over AI-generated Biden robocalls"</a>. <i>Reuters</i>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Reuters&amp;rft.atitle=US+political+consultant+indicted+over+AI-generated+Biden+robocalls&amp;rft.date=2024-05-23&amp;rft.aulast=Shepardson&amp;rft.aufirst=David&amp;rft_id=https%3A%2F%2Fwww.reuters.com%2Fworld%2Fus%2Fus-political-consultant-indicted-over-ai-generated-biden-robocalls-2024-05-23%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-207"><span class="mw-cite-backlink"><b><a href="#cite_ref-207">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://apnews.com/article/biden-robocalls-ai-new-hampshire-charges-fines-9e9cc63a71eb9c78b9bb0d1ec2aa6e9c">"US political consultant indicted over AI-generated Biden robocalls"</a>. <i>AP News</i>. 23 May 2024. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173146/https://apnews.com/article/biden-robocalls-ai-new-hampshire-charges-fines-9e9cc63a71eb9c78b9bb0d1ec2aa6e9c">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 June</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=AP+News&amp;rft.atitle=US+political+consultant+indicted+over+AI-generated+Biden+robocalls&amp;rft.date=2024-05-23&amp;rft_id=https%3A%2F%2Fapnews.com%2Farticle%2Fbiden-robocalls-ai-new-hampshire-charges-fines-9e9cc63a71eb9c78b9bb0d1ec2aa6e9c&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-blog.twitter-2020-208"><span class="mw-cite-backlink">^ <a href="#cite_ref-blog.twitter-2020_208-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-blog.twitter-2020_208-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-blog.twitter-2020_208-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-blog.twitter-2020_208-3"><sup><i><b>d</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://blog.twitter.com/en_us/topics/company/2019/synthetic_manipulated_media_policy_feedback.html">"Help us shape our approach to synthetic and manipulated media"</a>. <i>blog.twitter.com</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201028211949/https://blog.twitter.com/en_us/topics/company/2019/synthetic_manipulated_media_policy_feedback.html">Archived</a> from the original on 28 October 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=blog.twitter.com&amp;rft.atitle=Help+us+shape+our+approach+to+synthetic+and+manipulated+media&amp;rft_id=https%3A%2F%2Fblog.twitter.com%2Fen_us%2Ftopics%2Fcompany%2F2019%2Fsynthetic_manipulated_media_policy_feedback.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-209"><span class="mw-cite-backlink"><b><a href="#cite_ref-209">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://techcrunch.com/2019/11/11/twitter-drafts-a-deepfake-policy-that-would-label-and-warn-but-not-remove-manipulated-media/">"TechCrunch"</a>. <i>TechCrunch</i>. 11 November 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210714032908/https://techcrunch.com/2019/11/11/twitter-drafts-a-deepfake-policy-that-would-label-and-warn-but-not-remove-manipulated-media/">Archived</a> from the original on 14 July 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=TechCrunch&amp;rft.atitle=TechCrunch&amp;rft.date=2019-11-11&amp;rft_id=https%3A%2F%2Ftechcrunch.com%2F2019%2F11%2F11%2Ftwitter-drafts-a-deepfake-policy-that-would-label-and-warn-but-not-remove-manipulated-media%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-210"><span class="mw-cite-backlink"><b><a href="#cite_ref-210">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.reuters.com/technology/artificial-intelligence/five-us-states-push-musk-fix-ai-chatbot-over-election-misinformation-2024-08-05/">Five US states push Musk to fix AI chatbot over election misinformation</a> Reuters accessed 19 August 2024.</span> </li> <li id="cite_note-211"><span class="mw-cite-backlink"><b><a href="#cite_ref-211">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.washingtonpost.com/technology/2024/08/16/elon-musk-grok-ai/">Noxious images spread after Elon Musk launches AI tool with few guardrails</a> The Washington Post accessed 19 August 2024.</span> </li> <li id="cite_note-212"><span class="mw-cite-backlink"><b><a href="#cite_ref-212">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.rollingstone.com/culture/culture-features/elon-musk-twitter-misinformation-timeline-1235076786/">How Elon Musk and X Became the Biggest Purveyors of Online Misinformation</a> Rolling Stone accessed 19 August 2024.</span> </li> <li id="cite_note-Ferrer-2020-213"><span class="mw-cite-backlink">^ <a href="#cite_ref-Ferrer-2020_213-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Ferrer-2020_213-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link 
rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://ai.facebook.com/blog/deepfake-detection-challenge-results-an-open-initiative-to-advance-ai/">"Deepfake Detection Challenge Results: An open initiative to advance AI"</a>. <i>ai.facebook.com</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201029023928/https://ai.facebook.com/blog/deepfake-detection-challenge-results-an-open-initiative-to-advance-ai">Archived</a> from the original on 29 October 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=ai.facebook.com&amp;rft.atitle=Deepfake+Detection+Challenge+Results%3A+An+open+initiative+to+advance+AI&amp;rft_id=https%3A%2F%2Fai.facebook.com%2Fblog%2Fdeepfake-detection-challenge-results-an-open-initiative-to-advance-ai%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Paul-2020-214"><span class="mw-cite-backlink">^ <a href="#cite_ref-Paul-2020_214-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Paul-2020_214-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPaul2020" class="citation news cs1">Paul, Katie (4 February 2020). <a rel="nofollow" class="external text" href="https://www.reuters.com/article/us-twitter-security-idUSKBN1ZY2OV">"Twitter to label deepfakes and other deceptive media"</a>. <i>Reuters</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201010081053/https://www.reuters.com/article/us-twitter-security-idUSKBN1ZY2OV">Archived</a> from the original on 10 October 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Reuters&amp;rft.atitle=Twitter+to+label+deepfakes+and+other+deceptive+media&amp;rft.date=2020-02-04&amp;rft.aulast=Paul&amp;rft.aufirst=Katie&amp;rft_id=https%3A%2F%2Fwww.reuters.com%2Farticle%2Fus-twitter-security-idUSKBN1ZY2OV&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2018e-215"><span class="mw-cite-backlink"><b><a href="#cite_ref-Cole-2018e_215-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2018" class="citation web cs1">Cole, Samantha (31 January 2018). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/vby5jx/deepfakes-ai-porn-removed-from-gfycat">"AI-Generated Fake Porn Makers Have Been Kicked Off Their Favorite Host"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191101165130/https://www.vice.com/en_us/article/vby5jx/deepfakes-ai-porn-removed-from-gfycat">Archived</a> from the original on 1 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">18 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=AI-Generated+Fake+Porn+Makers+Have+Been+Kicked+Off+Their+Favorite+Host&amp;rft.date=2018-01-31&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fvby5jx%2Fdeepfakes-ai-porn-removed-from-gfycat&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Ghoshal-2018-216"><span class="mw-cite-backlink">^ <a href="#cite_ref-Ghoshal-2018_216-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Ghoshal-2018_216-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGhoshal2018" class="citation web cs1">Ghoshal, Abhimanyu (7 February 2018). <a rel="nofollow" class="external text" href="https://thenextweb.com/insider/2018/02/07/twitter-pornhub-and-other-platforms-ban-ai-generated-celebrity-porn/">"Twitter, Pornhub and other platforms ban AI-generated celebrity porn"</a>. <i>The Next Web</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191220061859/https://thenextweb.com/insider/2018/02/07/twitter-pornhub-and-other-platforms-ban-ai-generated-celebrity-porn/">Archived</a> from the original on 20 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Next+Web&amp;rft.atitle=Twitter%2C+Pornhub+and+other+platforms+ban+AI-generated+celebrity+porn&amp;rft.date=2018-02-07&amp;rft.aulast=Ghoshal&amp;rft.aufirst=Abhimanyu&amp;rft_id=https%3A%2F%2Fthenextweb.com%2Finsider%2F2018%2F02%2F07%2Ftwitter-pornhub-and-other-platforms-ban-ai-generated-celebrity-porn%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-217"><span class="mw-cite-backlink"><b><a href="#cite_ref-217">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBöhm2018" class="citation news cs1">Böhm, Markus (7 February 2018). <a rel="nofollow" class="external text" href="https://www.spiegel.de/netzwelt/web/deepfakes-online-plattformen-wollen-fake-promi-pornos-loeschen-a-1192170.html">"<span class="cs1-kern-left"></span>"Deepfakes": Firmen gehen gegen gefälschte Promi-Pornos vor"</a>. <i>Spiegel Online</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190923002744/https://www.spiegel.de/netzwelt/web/deepfakes-online-plattformen-wollen-fake-promi-pornos-loeschen-a-1192170.html">Archived</a> from the original on 23 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Spiegel+Online&amp;rft.atitle=%22Deepfakes%22%3A+Firmen+gehen+gegen+gef%C3%A4lschte+Promi-Pornos+vor&amp;rft.date=2018-02-07&amp;rft.aulast=B%C3%B6hm&amp;rft.aufirst=Markus&amp;rft_id=https%3A%2F%2Fwww.spiegel.de%2Fnetzwelt%2Fweb%2Fdeepfakes-online-plattformen-wollen-fake-promi-pornos-loeschen-a-1192170.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-218"><span class="mw-cite-backlink"><b><a href="#cite_ref-218">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFbarbara.wimmer2018" class="citation web cs1 cs1-prop-foreign-lang-source">barbara.wimmer (8 February 2018). <a rel="nofollow" class="external text" href="https://futurezone.at/digital-life/deepfakes-reddit-loescht-forum-fuer-kuenstlich-generierte-fake-pornos/400003061">"Deepfakes: Reddit löscht Forum für künstlich generierte Fake-Pornos"</a>. <i>futurezone.at</i> (in German). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20180208194840/https://futurezone.at/digital-life/deepfakes-reddit-loescht-forum-fuer-kuenstlich-generierte-fake-pornos">Archived</a> from the original on 8 February 2018<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=futurezone.at&amp;rft.atitle=Deepfakes%3A+Reddit+l%C3%B6scht+Forum+f%C3%BCr+k%C3%BCnstlich+generierte+Fake-Pornos&amp;rft.date=2018-02-08&amp;rft.au=barbara.wimmer&amp;rft_id=https%3A%2F%2Ffuturezone.at%2Fdigital-life%2Fdeepfakes-reddit-loescht-forum-fuer-kuenstlich-generierte-fake-pornos%2F400003061&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-219"><span class="mw-cite-backlink"><b><a href="#cite_ref-219">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1 cs1-prop-foreign-lang-source"><a rel="nofollow" class="external text" href="https://www.heise.de/newsticker/meldung/Deepfakes-Auch-Reddit-verbannt-Fake-Porn-3962987.html">"Deepfakes: Auch Reddit verbannt Fake-Porn"</a>. <i>heise online</i> (in German). 8 February 2018. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190410050632/https://www.heise.de/newsticker/meldung/Deepfakes-Auch-Reddit-verbannt-Fake-Porn-3962987.html">Archived</a> from the original on 10 April 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=heise+online&amp;rft.atitle=Deepfakes%3A+Auch+Reddit+verbannt+Fake-Porn&amp;rft.date=2018-02-08&amp;rft_id=https%3A%2F%2Fwww.heise.de%2Fnewsticker%2Fmeldung%2FDeepfakes-Auch-Reddit-verbannt-Fake-Porn-3962987.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-220"><span class="mw-cite-backlink"><b><a href="#cite_ref-220">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1 cs1-prop-foreign-lang-source"><a rel="nofollow" class="external text" href="https://www.derstandard.at/story/2000073855676/reddit-verbannt-deepfake-pornos">"Reddit verbannt Deepfake-Pornos - derStandard.de"</a>. <i>DER STANDARD</i> (in Austrian German). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191109005835/https://www.derstandard.at/story/2000073855676/reddit-verbannt-deepfake-pornos">Archived</a> from the original on 9 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=DER+STANDARD&amp;rft.atitle=Reddit+verbannt+Deepfake-Pornos+-+derStandard.de&amp;rft_id=https%3A%2F%2Fwww.derstandard.at%2Fstory%2F2000073855676%2Freddit-verbannt-deepfake-pornos&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-221"><span class="mw-cite-backlink"><b><a href="#cite_ref-221">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRobertson2018" class="citation web cs1">Robertson, Adi (7 February 2018). <a rel="nofollow" class="external text" href="https://www.theverge.com/2018/2/7/16982046/reddit-deepfakes-ai-celebrity-face-swap-porn-community-ban">"Reddit bans 'deepfakes' AI porn communities"</a>. <i>The Verge</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190924035821/https://www.theverge.com/2018/2/7/16982046/reddit-deepfakes-ai-celebrity-face-swap-porn-community-ban">Archived</a> from the original on 24 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Verge&amp;rft.atitle=Reddit+bans+%27deepfakes%27+AI+porn+communities&amp;rft.date=2018-02-07&amp;rft.aulast=Robertson&amp;rft.aufirst=Adi&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2018%2F2%2F7%2F16982046%2Freddit-deepfakes-ai-celebrity-face-swap-porn-community-ban&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2018a-222"><span class="mw-cite-backlink"><b><a href="#cite_ref-Cole-2018a_222-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2018" class="citation web cs1">Cole, Samantha (6 February 2018). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/ywqgab/twitter-bans-deepfakes">"Twitter Is the Latest Platform to Ban AI-Generated Porn"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191101165115/https://www.vice.com/en_us/article/ywqgab/twitter-bans-deepfakes">Archived</a> from the original on 1 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=Twitter+Is+the+Latest+Platform+to+Ban+AI-Generated+Porn&amp;rft.date=2018-02-06&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fywqgab%2Ftwitter-bans-deepfakes&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-223"><span class="mw-cite-backlink"><b><a href="#cite_ref-223">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPrice2018" class="citation web cs1">Price, Rob (27 January 2018). <a rel="nofollow" class="external text" href="https://www.businessinsider.com/discord-closes-down-deepfakes-server-ai-celebrity-porn-2018-1">"Discord just shut down a chat group dedicated to sharing porn videos edited with AI to include celebrities"</a>. <i>Business Insider Australia</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191215182904/https://www.businessinsider.com.au/discord-closes-down-deepfakes-server-ai-celebrity-porn-2018-1">Archived</a> from the original on 15 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">28 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Business+Insider+Australia&amp;rft.atitle=Discord+just+shut+down+a+chat+group+dedicated+to+sharing+porn+videos+edited+with+AI+to+include+celebrities&amp;rft.date=2018-01-27&amp;rft.aulast=Price&amp;rft.aufirst=Rob&amp;rft_id=https%3A%2F%2Fwww.businessinsider.com%2Fdiscord-closes-down-deepfakes-server-ai-celebrity-porn-2018-1&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-224"><span class="mw-cite-backlink"><b><a href="#cite_ref-224">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.engadget.com/2018/02/07/twitter-joins-those-banning-deepfake-ai-porn/">"Twitter bans 'deepfake' AI-generated porn"</a>. <i>Engadget</i>. 20 July 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191215182857/https://www.engadget.com/2018/02/07/twitter-joins-those-banning-deepfake-ai-porn/">Archived</a> from the original on 15 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">28 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Engadget&amp;rft.atitle=Twitter+bans+%27deepfake%27+AI-generated+porn&amp;rft.date=2019-07-20&amp;rft_id=https%3A%2F%2Fwww.engadget.com%2F2018%2F02%2F07%2Ftwitter-joins-those-banning-deepfake-ai-porn%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;-225"><span class="mw-cite-backlink">^ <a href="#cite_ref-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;_225-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Fake-porn_videos_are_being_weaponized_to_harass_and_humiliate_women:_&#39;Everybody_is_a_potential_target&#39;_225-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHarrell" class="citation news cs1">Harrell, Drew. <a rel="nofollow" class="external text" href="https://www.washingtonpost.com/technology/2018/12/30/fake-porn-videos-are-being-weaponized-harass-humiliate-women-everybody-is-potential-target">"Fake-porn videos are being weaponized to harass and humiliate women: 'Everybody is a potential target'<span class="cs1-kern-right"></span>"</a>. <i>The Washington Post</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190102031512/https://www.washingtonpost.com/technology/2018/12/30/fake-porn-videos-are-being-weaponized-harass-humiliate-women-everybody-is-potential-target/">Archived</a> from the original on 2 January 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">1 January</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Washington+Post&amp;rft.atitle=Fake-porn+videos+are+being+weaponized+to+harass+and+humiliate+women%3A+%27Everybody+is+a+potential+target%27&amp;rft.aulast=Harrell&amp;rft.aufirst=Drew&amp;rft_id=https%3A%2F%2Fwww.washingtonpost.com%2Ftechnology%2F2018%2F12%2F30%2Ffake-porn-videos-are-being-weaponized-harass-humiliate-women-everybody-is-potential-target&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2018c-226"><span class="mw-cite-backlink">^ <a href="#cite_ref-Cole-2018c_226-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Cole-2018c_226-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2018" class="citation web cs1">Cole, Samantha (6 February 2018). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/zmwvdw/pornhub-bans-deepfakes">"Pornhub Is Banning AI-Generated Fake Porn Videos, Says They're Nonconsensual"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191101165117/https://www.vice.com/en_us/article/zmwvdw/pornhub-bans-deepfakes">Archived</a> from the original on 1 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=Pornhub+Is+Banning+AI-Generated+Fake+Porn+Videos%2C+Says+They%27re+Nonconsensual&amp;rft.date=2018-02-06&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fzmwvdw%2Fpornhub-bans-deepfakes&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-227"><span class="mw-cite-backlink"><b><a href="#cite_ref-227">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBeresGilmer2018" class="citation web cs1">Beres, Damon; Gilmer, Marcus (2 February 2018). <a rel="nofollow" class="external text" href="https://mashable.com/2018/02/02/what-are-deepfakes/">"A guide to 'deepfakes,' the internet's latest moral crisis"</a>. <i>Mashable</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191209201826/https://mashable.com/2018/02/02/what-are-deepfakes/">Archived</a> from the original on 9 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Mashable&amp;rft.atitle=A+guide+to+%27deepfakes%2C%27+the+internet%27s+latest+moral+crisis&amp;rft.date=2018-02-02&amp;rft.aulast=Beres&amp;rft.aufirst=Damon&amp;rft.au=Gilmer%2C+Marcus&amp;rft_id=https%3A%2F%2Fmashable.com%2F2018%2F02%2F02%2Fwhat-are-deepfakes%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Jee-2019-228"><span class="mw-cite-backlink">^ <a href="#cite_ref-Jee-2019_228-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Jee-2019_228-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.technologyreview.com/f/613690/facebook-deepfake-zuckerberg-instagram-social-media-election-video/">"Facebook has promised to leave up a deepfake video of Mark Zuckerberg"</a>. <i>MIT Technology Review</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191016060800/https://www.technologyreview.com/f/613690/facebook-deepfake-zuckerberg-instagram-social-media-election-video/">Archived</a> from the original on 16 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MIT+Technology+Review&amp;rft.atitle=Facebook+has+promised+to+leave+up+a+deepfake+video+of+Mark+Zuckerberg&amp;rft_id=https%3A%2F%2Fwww.technologyreview.com%2Ff%2F613690%2Ffacebook-deepfake-zuckerberg-instagram-social-media-election-video%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Cole-2019b-229"><span class="mw-cite-backlink"><b><a href="#cite_ref-Cole-2019b_229-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCole2019" class="citation web cs1">Cole, Samantha (11 June 2019). <a rel="nofollow" class="external text" href="https://www.vice.com/en_us/article/ywyxex/deepfake-of-mark-zuckerberg-facebook-fake-video-policy">"This Deepfake of Mark Zuckerberg Tests Facebook's Fake Video Policies"</a>. <i>Vice</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191012160019/https://www.vice.com/en_us/article/ywyxex/deepfake-of-mark-zuckerberg-facebook-fake-video-policy">Archived</a> from the original on 12 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Vice&amp;rft.atitle=This+Deepfake+of+Mark+Zuckerberg+Tests+Facebook%27s+Fake+Video+Policies&amp;rft.date=2019-06-11&amp;rft.aulast=Cole&amp;rft.aufirst=Samantha&amp;rft_id=https%3A%2F%2Fwww.vice.com%2Fen_us%2Farticle%2Fywyxex%2Fdeepfake-of-mark-zuckerberg-facebook-fake-video-policy&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-230"><span class="mw-cite-backlink"><b><a href="#cite_ref-230">^</a></b></span> <span class="reference-text">Anderson, Martin (2022). <a rel="nofollow" class="external text" href="https://www.unite.ai/google-has-banned-the-training-of-deepfakes-in-colab/"><i>Google Has Banned the Training of Deepfakes in Colab</i></a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220530122326/https://www.unite.ai/google-has-banned-the-training-of-deepfakes-in-colab/">Archived</a> 30 May 2022 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a>, Unite.ai, May 28, 2022</span> </li> <li id="cite_note-231"><span class="mw-cite-backlink"><b><a href="#cite_ref-231">^</a></b></span> <span class="reference-text">Maiberg, Emanuel (2022). 
<a rel="nofollow" class="external text" href="https://www.vice.com/en/article/qjb7b7/ethical-deepfakes-deep-tom-cruise-ai-generated-porn"><i>It Takes 2 Clicks to Get From 'Deep Tom Cruise' to Vile Deepfake Porn</i></a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220530140832/https://www.vice.com/en/article/qjb7b7/ethical-deepfakes-deep-tom-cruise-ai-generated-porn">Archived</a> 30 May 2022 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a>, VICE, May 17, 2022</span> </li> <li id="cite_note-232"><span class="mw-cite-backlink"><b><a href="#cite_ref-232">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSasse2018" class="citation web cs1">Sasse, Ben (21 December 2018). <a rel="nofollow" class="external text" href="https://www.congress.gov/bill/115th-congress/senate-bill/3805">"S. 3805–115th Congress (2017-2018): Malicious Deep Fake Prohibition Act of 2018"</a>. <i>www.congress.gov</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191016053649/https://www.congress.gov/bill/115th-congress/senate-bill/3805">Archived</a> from the original on 16 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=www.congress.gov&amp;rft.atitle=S.+3805%E2%80%93115th+Congress+%282017-2018%29%3A+Malicious+Deep+Fake+Prohibition+Act+of+2018&amp;rft.date=2018-12-21&amp;rft.aulast=Sasse&amp;rft.aufirst=Ben&amp;rft_id=https%3A%2F%2Fwww.congress.gov%2Fbill%2F115th-congress%2Fsenate-bill%2F3805&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Clarke-2019-233"><span class="mw-cite-backlink"><b><a href="#cite_ref-Clarke-2019_233-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFClarke2019" class="citation web cs1">Clarke, Yvette D. (28 June 2019). <a rel="nofollow" class="external text" href="https://www.congress.gov/bill/116th-congress/house-bill/3230">"H.R.3230 - 116th Congress (2019-2020): Defending Each and Every Person from False Appearances by Keeping Exploitation Subject to Accountability Act of 2019"</a>. <i>www.congress.gov</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191217110329/https://www.congress.gov/bill/116th-congress/house-bill/3230">Archived</a> from the original on 17 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=www.congress.gov&amp;rft.atitle=H.R.3230+-+116th+Congress+%282019-2020%29%3A+Defending+Each+and+Every+Person+from+False+Appearances+by+Keeping+Exploitation+Subject+to+Accountability+Act+of+2019&amp;rft.date=2019-06-28&amp;rft.aulast=Clarke&amp;rft.aufirst=Yvette+D.&amp;rft_id=https%3A%2F%2Fwww.congress.gov%2Fbill%2F116th-congress%2Fhouse-bill%2F3230&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-234"><span class="mw-cite-backlink"><b><a href="#cite_ref-234">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://techcrunch.com/2019/07/01/deepfake-revenge-porn-is-now-illegal-in-virginia/">"<span class="cs1-kern-left"></span>'Deepfake' revenge porn is now illegal in Virginia"</a>. <i>TechCrunch</i>. July 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210714032913/https://techcrunch.com/2019/07/01/deepfake-revenge-porn-is-now-illegal-in-virginia/">Archived</a> from the original on 14 July 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=TechCrunch&amp;rft.atitle=%27Deepfake%27+revenge+porn+is+now+illegal+in+Virginia&amp;rft.date=2019-07&amp;rft_id=https%3A%2F%2Ftechcrunch.com%2F2019%2F07%2F01%2Fdeepfake-revenge-porn-is-now-illegal-in-virginia%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-235"><span class="mw-cite-backlink"><b><a href="#cite_ref-235">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFIacono_Brown2019" class="citation web cs1">Iacono Brown, Nina (15 July 2019). <a rel="nofollow" class="external text" href="https://slate.com/technology/2019/07/congress-deepfake-regulation-230-2020.html">"Congress Wants to Solve Deepfakes by 2020. That Should Worry Us"</a>. <i>Slate Magazine</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191016053644/https://slate.com/technology/2019/07/congress-deepfake-regulation-230-2020.html">Archived</a> from the original on 16 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">16 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Slate+Magazine&amp;rft.atitle=Congress+Wants+to+Solve+Deepfakes+by+2020.+That+Should+Worry+Us.&amp;rft.date=2019-07-15&amp;rft.aulast=Iacono+Brown&amp;rft.aufirst=Nina&amp;rft_id=https%3A%2F%2Fslate.com%2Ftechnology%2F2019%2F07%2Fcongress-deepfake-regulation-230-2020.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-AB602-236"><span class="mw-cite-backlink">^ <a href="#cite_ref-AB602_236-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-AB602_236-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://leginfo.legislature.ca.gov/faces/billTextClient.xhtml?bill_id=201920200AB602">"Bill Text - AB-602 Depiction of individual using digital or electronic technology: sexually explicit material: cause of action"</a>. <i>leginfo.legislature.ca.gov</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191117091545/https://leginfo.legislature.ca.gov/faces/billTextClient.xhtml?bill_id=201920200AB602">Archived</a> from the original on 17 November 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=leginfo.legislature.ca.gov&amp;rft.atitle=Bill+Text+-+AB-602+Depiction+of+individual+using+digital+or+electronic+technology%3A+sexually+explicit+material%3A+cause+of+action.&amp;rft_id=https%3A%2F%2Fleginfo.legislature.ca.gov%2Ffaces%2FbillTextClient.xhtml%3Fbill_id%3D201920200AB602&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-AB730-237"><span class="mw-cite-backlink">^ <a href="#cite_ref-AB730_237-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-AB730_237-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://leginfo.legislature.ca.gov/faces/billTextClient.xhtml?bill_id=201920200AB730">"Bill Text - AB-730 Elections: deceptive audio or visual media"</a>. <i>leginfo.legislature.ca.gov</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191031021726/https://leginfo.legislature.ca.gov/faces/billTextClient.xhtml?bill_id=201920200AB730">Archived</a> from the original on 31 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">9 November</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=leginfo.legislature.ca.gov&amp;rft.atitle=Bill+Text+-+AB-730+Elections%3A+deceptive+audio+or+visual+media.&amp;rft_id=https%3A%2F%2Fleginfo.legislature.ca.gov%2Ffaces%2FbillTextClient.xhtml%3Fbill_id%3D201920200AB730&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-238"><span class="mw-cite-backlink"><b><a href="#cite_ref-238">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.govtrack.us/congress/bills/118/hr5586">H.R. 5586: DEEPFAKES Accountability Act</a> Govtrack.US accessed 15 August 2024.</span> </li> <li id="cite_note-239"><span class="mw-cite-backlink"><b><a href="#cite_ref-239">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.govtrack.us/congress/bills/118/hr6943">H.R. 6943: No AI FRAUD Act</a> Govtrack.US accessed 15 August 2024.</span> </li> <li id="cite_note-Reuters2019-240"><span class="mw-cite-backlink"><b><a href="#cite_ref-Reuters2019_240-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.reuters.com/article/us-china-technology/china-seeks-to-root-out-fake-news-and-deepfakes-with-new-online-content-rules-idUSKBN1Y30VU">"China seeks to root out fake news and deepfakes with new online content rules"</a>. <i><a href="/wiki/Reuters.com" class="mw-redirect" title="Reuters.com">Reuters.com</a></i>. <a href="/wiki/Reuters" title="Reuters">Reuters</a>. 29 November 2019. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20191217111759/https://www.reuters.com/article/us-china-technology/china-seeks-to-root-out-fake-news-and-deepfakes-with-new-online-content-rules-idUSKBN1Y30VU">Archived</a> from the original on 17 December 2019<span class="reference-accessdate">. Retrieved <span class="nowrap">17 December</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Reuters.com&amp;rft.atitle=China+seeks+to+root+out+fake+news+and+deepfakes+with+new+online+content+rules&amp;rft.date=2019-11-29&amp;rft_id=https%3A%2F%2Fwww.reuters.com%2Farticle%2Fus-china-technology%2Fchina-seeks-to-root-out-fake-news-and-deepfakes-with-new-online-content-rules-idUSKBN1Y30VU&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-TheVerge2019-241"><span class="mw-cite-backlink"><b><a href="#cite_ref-TheVerge2019_241-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFStatt2019" class="citation web cs1">Statt, Nick (29 November 2019). <a rel="nofollow" class="external text" href="https://www.theverge.com/2019/11/29/20988363/china-deepfakes-ban-internet-rules-fake-news-disclosure-virtual-reality">"China makes it a criminal offense to publish deepfakes or fake news without disclosure"</a>. <i><a href="/wiki/The_Verge" title="The Verge">The Verge</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191222164345/https://www.theverge.com/2019/11/29/20988363/china-deepfakes-ban-internet-rules-fake-news-disclosure-virtual-reality">Archived</a> from the original on 22 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">17 December</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Verge&amp;rft.atitle=China+makes+it+a+criminal+offense+to+publish+deepfakes+or+fake+news+without+disclosure&amp;rft.date=2019-11-29&amp;rft.aulast=Statt&amp;rft.aufirst=Nick&amp;rft_id=https%3A%2F%2Fwww.theverge.com%2F2019%2F11%2F29%2F20988363%2Fchina-deepfakes-ban-internet-rules-fake-news-disclosure-virtual-reality&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-242"><span class="mw-cite-backlink"><b><a href="#cite_ref-242">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.china-briefing.com/news/china-to-regulate-deep-synthesis-deep-fake-technology-starting-january-2023/">China to Regulate Deep Synthesis (Deepfake) Technology Starting 2023</a> China Briefing accessed 15 August 2024.</span> </li> <li id="cite_note-243"><span class="mw-cite-backlink"><b><a href="#cite_ref-243">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.loc.gov/item/global-legal-monitor/2023-04-25/china-provisions-on-deep-synthesis-technology-enter-into-effect/">China: Provisions on Deep Synthesis Technology Enter into Effect</a> Library of Congress accessed 15 August 2024.</span> </li> <li id="cite_note-244"><span class="mw-cite-backlink"><b><a href="#cite_ref-244">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.theguardian.com/world/2018/jun/21/call-for-upskirting-bill-to-include-deepfake-pornography-ban">Call for upskirting bill to include 'deepfake' pornography ban</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20180621090804/https://www.theguardian.com/world/2018/jun/21/call-for-upskirting-bill-to-include-deepfake-pornography-ban">Archived</a> 
21 June 2018 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a> <i><a href="/wiki/The_Guardian" title="The Guardian">The Guardian</a></i></span> </li> <li id="cite_note-245"><span class="mw-cite-backlink"><b><a href="#cite_ref-245">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.bbc.com/news/uk-68823042">Creating sexually explicit deepfakes to become a criminal offence</a> BBC accessed 15 August 2024.</span> </li> <li id="cite_note-246"><span class="mw-cite-backlink"><b><a href="#cite_ref-246">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.theguardian.com/technology/2024/apr/16/creating-sexually-explicit-deepfake-images-to-be-made-offence-in-uk">Creating sexually explicit deepfake images to be made offence in UK</a> accessed 15 August 2024.</span> </li> <li id="cite_note-247"><span class="mw-cite-backlink"><b><a href="#cite_ref-247">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external autonumber" href="https://cyber.gc.ca/sites/default/files/publications/tdp-2019-report_e.pdf">[2]</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191122155500/https://cyber.gc.ca/sites/default/files/publications/tdp-2019-report_e.pdf">Archived</a> 22 November 2019 at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a> see page 18</span> </li> <li id="cite_note-248"><span class="mw-cite-backlink"><b><a href="#cite_ref-248">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBogart2019" class="citation web cs1">Bogart, Nicole (10 September 2019). <a rel="nofollow" class="external text" href="https://election.ctvnews.ca/how-deepfakes-could-impact-the-2019-canadian-election-1.4586847">"How deepfakes could impact the 2019 Canadian election"</a>. 
<i>Federal Election 2019</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200127163749/https://election.ctvnews.ca/how-deepfakes-could-impact-the-2019-canadian-election-1.4586847">Archived</a> from the original on 27 January 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">28 January</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Federal+Election+2019&amp;rft.atitle=How+deepfakes+could+impact+the+2019+Canadian+election&amp;rft.date=2019-09-10&amp;rft.aulast=Bogart&amp;rft.aufirst=Nicole&amp;rft_id=https%3A%2F%2Felection.ctvnews.ca%2Fhow-deepfakes-could-impact-the-2019-canadian-election-1.4586847&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-249"><span class="mw-cite-backlink"><b><a href="#cite_ref-249">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://mcmillan.ca/What-Can-The-Law-Do-About-Deepfake">"What Can The Law Do About Deepfake"</a>. <i>mcmillan.ca</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191207025712/https://mcmillan.ca/What-Can-The-Law-Do-About-Deepfake">Archived</a> from the original on 7 December 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">28 January</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=mcmillan.ca&amp;rft.atitle=What+Can+The+Law+Do+About+Deepfake&amp;rft_id=https%3A%2F%2Fmcmillan.ca%2FWhat-Can-The-Law-Do-About-Deepfake&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-250"><span class="mw-cite-backlink"><b><a href="#cite_ref-250">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.techpolicy.press/an-overview-of-canadas-online-harms-act/">An Overview of Canada's Online Harms Act</a> TechPolicy.Press accessed 15 August 2024.</span> </li> <li id="cite_note-251"><span class="mw-cite-backlink"><b><a href="#cite_ref-251">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://www.parl.ca/Content/Bills/441/Government/C-63/C-63_1/C-63_1.PDF">BILL C-63</a> <a href="/wiki/House_of_Commons_of_Canada" title="House of Commons of Canada">House of Commons of Canada</a> accessed 15 August 2024.</span> </li> <li id="cite_note-252"><span class="mw-cite-backlink"><b><a href="#cite_ref-252">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFVenkatasubbbu2023" class="citation web cs1">Venkatasubbbu, Satish (27 June 2023). <a rel="nofollow" class="external text" href="https://cybermithra.in/2023/06/27/deepfakes-part-2">"How deepfakes are used to scam You &amp; Me? Current trends on detection using AI &amp; legal regulations worldwide"</a>. <i>cybermithra.in</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173649/https://cybermithra.in/2023/06/27/deepfakes-part-2/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=cybermithra.in&amp;rft.atitle=How+deepfakes+are+used+to+scam+You+%26+Me%3F+Current+trends+on+detection+using+AI+%26+legal+regulations+worldwide.&amp;rft.date=2023-06-27&amp;rft.aulast=Venkatasubbbu&amp;rft.aufirst=Satish&amp;rft_id=https%3A%2F%2Fcybermithra.in%2F2023%2F06%2F27%2Fdeepfakes-part-2&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-253"><span class="mw-cite-backlink"><b><a href="#cite_ref-253">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRomero-Moreno2024" class="citation journal cs1">Romero-Moreno, Felipe (29 March 2024). <a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F13600869.2024.2324540">"Generative AI and deepfakes: a human rights approach to tackling harmful content"</a>. <i>International Review of Law, Computers &amp; Technology</i>. <b>39</b> (2): 297–326. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1080%2F13600869.2024.2324540">10.1080/13600869.2024.2324540</a></span>. <a href="/wiki/Hdl_(identifier)" class="mw-redirect" title="Hdl (identifier)">hdl</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://hdl.handle.net/2299%2F20431">2299/20431</a></span>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/1360-0869">1360-0869</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=International+Review+of+Law%2C+Computers+%26+Technology&amp;rft.atitle=Generative+AI+and+deepfakes%3A+a+human+rights+approach+to+tackling+harmful+content&amp;rft.volume=39&amp;rft.issue=2&amp;rft.pages=297-326&amp;rft.date=2024-03-29&amp;rft_id=info%3Ahdl%2F2299%2F20431&amp;rft.issn=1360-0869&amp;rft_id=info%3Adoi%2F10.1080%2F13600869.2024.2324540&amp;rft.aulast=Romero-Moreno&amp;rft.aufirst=Felipe&amp;rft_id=https%3A%2F%2Fdoi.org%2F10.1080%252F13600869.2024.2324540&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-254"><span class="mw-cite-backlink"><b><a href="#cite_ref-254">^</a></b></span> <span class="reference-text"><a rel="nofollow" class="external text" href="https://techcrunch.com/2024/08/11/elon-musks-x-targeted-with-eight-privacy-complaints-after-grabbing-eu-users-data-for-training-grok/">Elon Musk's X targeted with nine privacy complaints after grabbing EU users’ data for training Grok</a> TechCrunch accessed 19 August 2024.</span> </li> <li id="cite_note-255"><span class="mw-cite-backlink"><b><a href="#cite_ref-255">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHatmaker2018" class="citation web cs1">Hatmaker, Taylor (1 May 2018). <a rel="nofollow" class="external text" href="https://techcrunch.com/2018/04/30/deepfakes-fake-videos-darpa-sri-international-media-forensics/">"DARPA is funding new tech that can identify manipulated videos and 'deepfakes'<span class="cs1-kern-right"></span>"</a>. <i>TechCrunch</i>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20231208032052/https://techcrunch.com/2018/04/30/deepfakes-fake-videos-darpa-sri-international-media-forensics/">Archived</a> from the original on 8 December 2023<span class="reference-accessdate">. Retrieved <span class="nowrap">14 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=TechCrunch&amp;rft.atitle=DARPA+is+funding+new+tech+that+can+identify+manipulated+videos+and+%27deepfakes%27&amp;rft.date=2018-05-01&amp;rft.aulast=Hatmaker&amp;rft.aufirst=Taylor&amp;rft_id=https%3A%2F%2Ftechcrunch.com%2F2018%2F04%2F30%2Fdeepfakes-fake-videos-darpa-sri-international-media-forensics%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-256"><span class="mw-cite-backlink"><b><a href="#cite_ref-256">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHsu2018" class="citation web cs1">Hsu, Jeremy (22 June 2018). <a rel="nofollow" class="external text" href="https://spectrum.ieee.org/experts-bet-on-first-deepfakes-political-scandal">"Experts Bet on First Deepfakes Political Scandal - IEEE Spectrum"</a>. <i><a href="/wiki/IEEE" class="mw-redirect" title="IEEE">IEEE</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240222112955/https://spectrum.ieee.org/experts-bet-on-first-deepfakes-political-scandal">Archived</a> from the original on 22 February 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=IEEE&amp;rft.atitle=Experts+Bet+on+First+Deepfakes+Political+Scandal+-+IEEE+Spectrum&amp;rft.date=2018-06-22&amp;rft.aulast=Hsu&amp;rft.aufirst=Jeremy&amp;rft_id=https%3A%2F%2Fspectrum.ieee.org%2Fexperts-bet-on-first-deepfakes-political-scandal&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Corvey-2020-257"><span class="mw-cite-backlink"><b><a href="#cite_ref-Corvey-2020_257-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.darpa.mil/program/media-forensics">"Media Forensics"</a>. <i>www.darpa.mil</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201029173807/https://www.darpa.mil/program/media-forensics">Archived</a> from the original on 29 October 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=www.darpa.mil&amp;rft.atitle=Media+Forensics&amp;rft_id=https%3A%2F%2Fwww.darpa.mil%2Fprogram%2Fmedia-forensics&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-258"><span class="mw-cite-backlink"><b><a href="#cite_ref-258">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.technologyreview.com/2018/05/23/142770/the-us-military-is-funding-an-effort-to-catch-deepfakes-and-other-ai-trickery/">"The US military is funding an effort to catch deepfakes and other AI trickery"</a>. <i>MIT Technology Review</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201101063502/https://www.technologyreview.com/2018/05/23/142770/the-us-military-is-funding-an-effort-to-catch-deepfakes-and-other-ai-trickery/">Archived</a> from the original on 1 November 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=MIT+Technology+Review&amp;rft.atitle=The+US+military+is+funding+an+effort+to+catch+deepfakes+and+other+AI+trickery&amp;rft_id=https%3A%2F%2Fwww.technologyreview.com%2F2018%2F05%2F23%2F142770%2Fthe-us-military-is-funding-an-effort-to-catch-deepfakes-and-other-ai-trickery%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-259"><span class="mw-cite-backlink"><b><a href="#cite_ref-259">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCollins2019" class="citation web cs1">Collins, Connor (11 March 2019). <a rel="nofollow" class="external text" href="https://govciomedia.com/darpa-tackles-deepfakes-with-ai/">"DARPA Tackles Deepfakes With AI"</a>. <i>GovCIO Media &amp; Research</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240302173105/http://govciomedia.com/darpa-tackles-deepfakes-with-ai/">Archived</a> from the original on 2 March 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=GovCIO+Media+%26+Research&amp;rft.atitle=DARPA+Tackles+Deepfakes+With+AI&amp;rft.date=2019-03-11&amp;rft.aulast=Collins&amp;rft.aufirst=Connor&amp;rft_id=https%3A%2F%2Fgovciomedia.com%2Fdarpa-tackles-deepfakes-with-ai%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Corrigan-2019-260"><span class="mw-cite-backlink">^ <a href="#cite_ref-Corrigan-2019_260-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Corrigan-2019_260-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.nextgov.com/emerging-tech/2019/08/darpa-taking-deepfake-problem/158980/">"DARPA Is Taking On the Deepfake Problem"</a>. <i>Nextgov.com</i>. 6 August 2019. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20201028083255/https://www.nextgov.com/emerging-tech/2019/08/darpa-taking-deepfake-problem/158980/">Archived</a> from the original on 28 October 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">7 October</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Nextgov.com&amp;rft.atitle=DARPA+Is+Taking+On+the+Deepfake+Problem&amp;rft.date=2019-08-06&amp;rft_id=https%3A%2F%2Fwww.nextgov.com%2Femerging-tech%2F2019%2F08%2Fdarpa-taking-deepfake-problem%2F158980%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-261"><span class="mw-cite-backlink"><b><a href="#cite_ref-261">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSybert2021" class="citation web cs1">Sybert, Sarah (16 September 2021). <a rel="nofollow" class="external text" href="https://govciomedia.com/darpa-launches-new-programs-to-detect-falsified-media/">"DARPA Launches New Programs to Detect Falsified Media"</a>. <i>GovCIO Media &amp; Research</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173648/https://govciomedia.com/darpa-launches-new-programs-to-detect-falsified-media/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=GovCIO+Media+%26+Research&amp;rft.atitle=DARPA+Launches+New+Programs+to+Detect+Falsified+Media&amp;rft.date=2021-09-16&amp;rft.aulast=Sybert&amp;rft.aufirst=Sarah&amp;rft_id=https%3A%2F%2Fgovciomedia.com%2Fdarpa-launches-new-programs-to-detect-falsified-media%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-262"><span class="mw-cite-backlink"><b><a href="#cite_ref-262">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCooper2024" class="citation web cs1">Cooper, Naomi (15 March 2024). <a rel="nofollow" class="external text" href="https://executivegov.com/2024/03/darpa-launches-2-new-efforts-to-boost-defenses-against-manipulated-media/">"DARPA Launches 2 New Efforts to Boost Defenses Against Manipulated Media"</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240315215444/https://executivegov.com/2024/03/darpa-launches-2-new-efforts-to-boost-defenses-against-manipulated-media/">Archived</a> from the original on 15 March 2024<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=DARPA+Launches+2+New+Efforts+to+Boost+Defenses+Against+Manipulated+Media&amp;rft.date=2024-03-15&amp;rft.aulast=Cooper&amp;rft.aufirst=Naomi&amp;rft_id=https%3A%2F%2Fexecutivegov.com%2F2024%2F03%2Fdarpa-launches-2-new-efforts-to-boost-defenses-against-manipulated-media%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-263"><span class="mw-cite-backlink"><b><a href="#cite_ref-263">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://semanticforensics.com/analytic-catalog">"Semantic Forensics - Analytic Catalog"</a>. <i>semanticforensics.com</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240418015239/https://semanticforensics.com/analytic-catalog/">Archived</a> from the original on 18 April 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">14 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=semanticforensics.com&amp;rft.atitle=Semantic+Forensics+-+Analytic+Catalog&amp;rft_id=https%3A%2F%2Fsemanticforensics.com%2Fanalytic-catalog&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-264"><span class="mw-cite-backlink"><b><a href="#cite_ref-264">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNational_Academies_of_Sciences,_Engineering,_and_Medicine2023" class="citation journal cs1">National Academies of Sciences, Engineering, and Medicine (22 June 2023). 
<a rel="nofollow" class="external text" href="https://www.nationalacademies.org/news/2023/06/nobel-prize-summit-fuels-initiatives-to-combat-misinformation-and-disinformation-and-build-trust-in-science">"Nobel Prize Summit Fuels Initiatives to Combat Misinformation and Disinformation and Build Trust in Science"</a>. <i><a href="/wiki/National_Academies_of_Sciences,_Engineering,_and_Medicine" title="National Academies of Sciences, Engineering, and Medicine">National Academies of Sciences, Engineering, and Medicine</a></i>. <a href="/wiki/WDQ_(identifier)" class="mw-redirect" title="WDQ (identifier)">Wikidata</a>&#160;<a href="https://www.wikidata.org/wiki/Q124711722" class="extiw" title="d:Q124711722">Q124711722</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=National+Academies+of+Sciences%2C+Engineering%2C+and+Medicine&amp;rft.atitle=Nobel+Prize+Summit+Fuels+Initiatives+to+Combat+Misinformation+and+Disinformation+and+Build+Trust+in+Science&amp;rft.date=2023-06-22&amp;rft.au=National+Academies+of+Sciences%2C+Engineering%2C+and+Medicine&amp;rft_id=https%3A%2F%2Fwww.nationalacademies.org%2Fnews%2F2023%2F06%2Fnobel-prize-summit-fuels-initiatives-to-combat-misinformation-and-disinformation-and-build-trust-in-science&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span><span class="cs1-maint citation-comment"><code class="cs1-code">{{<a href="/wiki/Template:Cite_journal" title="Template:Cite journal">cite journal</a>}}</code>: CS1 maint: multiple names: authors list (<a href="/wiki/Category:CS1_maint:_multiple_names:_authors_list" title="Category:CS1 maint: multiple names: authors list">link</a>)</span></span> </li> <li id="cite_note-1986-picaper-265"><span class="mw-cite-backlink"><b><a href="#cite_ref-1986-picaper_265-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite 
class="citation web cs1"><a rel="nofollow" class="external text" href="http://www.isfdb.org/cgi-bin/title.cgi?48679">"Picaper"</a>. <a href="/wiki/Internet_Speculative_Fiction_Database" title="Internet Speculative Fiction Database">Internet Speculative Fiction Database</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200729073914/http://www.isfdb.org/cgi-bin/title.cgi?48679">Archived</a> from the original on 29 July 2020<span class="reference-accessdate">. Retrieved <span class="nowrap">9 July</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Picaper&amp;rft.pub=Internet+Speculative+Fiction+Database&amp;rft_id=http%3A%2F%2Fwww.isfdb.org%2Fcgi-bin%2Ftitle.cgi%3F48679&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-1992-API-266"><span class="mw-cite-backlink"><b><a href="#cite_ref-1992-API_266-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKerr2010" class="citation book cs1">Kerr, Philip (2010). <i>A Philosophical Investigation</i>. National Geographic Books. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-0143117537" title="Special:BookSources/978-0143117537"><bdi>978-0143117537</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=A+Philosophical+Investigation&amp;rft.pub=National+Geographic+Books&amp;rft.date=2010&amp;rft.isbn=978-0143117537&amp;rft.aulast=Kerr&amp;rft.aufirst=Philip&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-267"><span class="mw-cite-backlink"><b><a href="#cite_ref-267">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBernal2019" class="citation news cs1">Bernal, Natasha (8 October 2019). <a rel="nofollow" class="external text" href="https://www.telegraph.co.uk/technology/2019/10/08/truth-behind-deepfake-video-bbc-ones-thriller-capture/">"The disturbing truth behind The Capture and real life deepfakes"</a>. <i>The Telegraph</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20191014090045/https://www.telegraph.co.uk/technology/2019/10/08/truth-behind-deepfake-video-bbc-ones-thriller-capture/">Archived</a> from the original on 14 October 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">24 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Telegraph&amp;rft.atitle=The+disturbing+truth+behind+The+Capture+and+real+life+deepfakes&amp;rft.date=2019-10-08&amp;rft.aulast=Bernal&amp;rft.aufirst=Natasha&amp;rft_id=https%3A%2F%2Fwww.telegraph.co.uk%2Ftechnology%2F2019%2F10%2F08%2Ftruth-behind-deepfake-video-bbc-ones-thriller-capture%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-268"><span class="mw-cite-backlink"><b><a href="#cite_ref-268">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFCrawley2019" class="citation news cs1">Crawley, Peter (5 September 2019). <a rel="nofollow" class="external text" href="https://www.irishtimes.com/culture/tv-radio-web/the-capture-a-bbc-thriller-of-surveillance-distortion-and-duplicity-1.4008823">"The Capture: A BBC thriller of surveillance, distortion and duplicity"</a>. <i>The Irish Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190909044130/https://www.irishtimes.com/culture/tv-radio-web/the-capture-a-bbc-thriller-of-surveillance-distortion-and-duplicity-1.4008823">Archived</a> from the original on 9 September 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">24 October</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Irish+Times&amp;rft.atitle=The+Capture%3A+A+BBC+thriller+of+surveillance%2C+distortion+and+duplicity&amp;rft.date=2019-09-05&amp;rft.aulast=Crawley&amp;rft.aufirst=Peter&amp;rft_id=https%3A%2F%2Fwww.irishtimes.com%2Fculture%2Ftv-radio-web%2Fthe-capture-a-bbc-thriller-of-surveillance-distortion-and-duplicity-1.4008823&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-269"><span class="mw-cite-backlink"><b><a href="#cite_ref-269">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation audio-visual cs1"><a rel="nofollow" class="external text" href="https://www.youtube.com/watch?v=6sUO2pgWAGc"><i>John Travolta is Forrest Gump &#91;DeepFake&#93;</i></a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240420055311/https://www.youtube.com/watch?v=6sUO2pgWAGc">Archived</a> from the original on 20 April 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">20 April</span> 2024</span> &#8211; via www.youtube.com.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=John+Travolta+is+Forrest+Gump+%5BDeepFake%5D&amp;rft_id=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3D6sUO2pgWAGc&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-270"><span class="mw-cite-backlink"><b><a href="#cite_ref-270">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFNovak2023" class="citation web cs1">Novak, Lauren (24 November 2023). 
<a rel="nofollow" class="external text" href="https://www.remindmagazine.com/article/8243/stars-who-turned-down-iconic-roles/">"John Travolta Turned Down 'Forrest Gump' &amp; Other Stars Who Chose Not to Play Iconic Characters"</a>. <i>Remind</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20240610173649/https://www.remindmagazine.com/article/8243/stars-who-turned-down-iconic-roles/">Archived</a> from the original on 10 June 2024<span class="reference-accessdate">. Retrieved <span class="nowrap">20 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Remind&amp;rft.atitle=John+Travolta+Turned+Down+%27Forrest+Gump%27+%26+Other+Stars+Who+Chose+Not+to+Play+Iconic+Characters&amp;rft.date=2023-11-24&amp;rft.aulast=Novak&amp;rft.aufirst=Lauren&amp;rft_id=https%3A%2F%2Fwww.remindmagazine.com%2Farticle%2F8243%2Fstars-who-turned-down-iconic-roles%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-271"><span class="mw-cite-backlink"><b><a href="#cite_ref-271">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation pressrelease cs1"><a rel="nofollow" class="external text" href="https://espnpressroom.com/us/press-releases/2021/01/espn-films-latest-30-for-30-documentary-al-davis-vs-the-nfl-to-premiere-february-4/">"ESPN Films Latest 30 for 30 Documentary <i>Al Davis vs. The NFL</i> to Premiere February 4"</a> (Press release). ESPN. 15 January 2021. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210206211440/https://espnpressroom.com/us/press-releases/2021/01/espn-films-latest-30-for-30-documentary-al-davis-vs-the-nfl-to-premiere-february-4/">Archived</a> from the original on 6 February 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">5 February</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=ESPN+Films+Latest+30+for+30+Documentary+Al+Davis+vs.+The+NFL+to+Premiere+February+4&amp;rft.pub=ESPN&amp;rft.date=2021-01-15&amp;rft_id=https%3A%2F%2Fespnpressroom.com%2Fus%2Fpress-releases%2F2021%2F01%2Fespn-films-latest-30-for-30-documentary-al-davis-vs-the-nfl-to-premiere-february-4%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-272"><span class="mw-cite-backlink"><b><a href="#cite_ref-272">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSprung2021" class="citation web cs1">Sprung, Shlomo (1 February 2021). <a rel="nofollow" class="external text" href="https://www.forbes.com/sites/shlomosprung/2021/02/02/al-davis-vs-the-nfl-uses-deepfake-technology-to-bring-late-raiders-owner-pete-rozelle-back-to-life/">"ESPN Documentary 'Al Davis Vs The NFL' Uses Deepfake Technology To Bring Late Raiders Owner Back To Life"</a>. <i>Forbes</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210414230505/https://www.forbes.com/sites/shlomosprung/2021/02/02/al-davis-vs-the-nfl-uses-deepfake-technology-to-bring-late-raiders-owner-pete-rozelle-back-to-life/">Archived</a> from the original on 14 April 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 February</span> 2021</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Forbes&amp;rft.atitle=ESPN+Documentary+%27Al+Davis+Vs+The+NFL%27+Uses+Deepfake+Technology+To+Bring+Late+Raiders+Owner+Back+To+Life&amp;rft.date=2021-02-01&amp;rft.aulast=Sprung&amp;rft.aufirst=Shlomo&amp;rft_id=https%3A%2F%2Fwww.forbes.com%2Fsites%2Fshlomosprung%2F2021%2F02%2F02%2Fal-davis-vs-the-nfl-uses-deepfake-technology-to-bring-late-raiders-owner-pete-rozelle-back-to-life%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-273"><span class="mw-cite-backlink"><b><a href="#cite_ref-273">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://www.citytv.com/show/hudson-rex/">"Hudson and Rex"</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220113221029/https://www.citytv.com/show/hudson-rex/">Archived</a> from the original on 13 January 2022<span class="reference-accessdate">. Retrieved <span class="nowrap">13 January</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Hudson+and+Rex&amp;rft_id=https%3A%2F%2Fwww.citytv.com%2Fshow%2Fhudson-rex%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-Wood-2022-274"><span class="mw-cite-backlink">^ <a href="#cite_ref-Wood-2022_274-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-Wood-2022_274-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWood2022" class="citation web cs1">Wood, Mikael (9 May 2022). 
<a rel="nofollow" class="external text" href="https://www.latimes.com/entertainment-arts/music/story/2022-05-09/kendrick-lamar-new-video-the-heart-part-5-deepfake">"Watch Kendrick Lamar morph into O.J., Kanye, Kobe, Nipsey Hussle in new video"</a>. <i>Los Angeles Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20221030140728/https://www.latimes.com/entertainment-arts/music/story/2022-05-09/kendrick-lamar-new-video-the-heart-part-5-deepfake">Archived</a> from the original on 30 October 2022<span class="reference-accessdate">. Retrieved <span class="nowrap">10 May</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Los+Angeles+Times&amp;rft.atitle=Watch+Kendrick+Lamar+morph+into+O.J.%2C+Kanye%2C+Kobe%2C+Nipsey+Hussle+in+new+video&amp;rft.date=2022-05-09&amp;rft.aulast=Wood&amp;rft.aufirst=Mikael&amp;rft_id=https%3A%2F%2Fwww.latimes.com%2Fentertainment-arts%2Fmusic%2Fstory%2F2022-05-09%2Fkendrick-lamar-new-video-the-heart-part-5-deepfake&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-275"><span class="mw-cite-backlink"><b><a href="#cite_ref-275">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation cs2"><a rel="nofollow" class="external text" href="https://www.youtube.com/watch?v=TzofRTcoPsU"><i>Aloe Blacc - Wake Me Up (Universal Language Mix)</i></a>, 20 April 2022, <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220824080656/https://www.youtube.com/watch?v=TzofRTcoPsU">archived</a> from the original on 24 August 2022<span class="reference-accessdate">, retrieved <span class="nowrap">24 August</span> 2022</span></cite><span 
title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Aloe+Blacc+-+Wake+Me+Up+%28Universal+Language+Mix%29&amp;rft.date=2022-04-20&amp;rft_id=https%3A%2F%2Fwww.youtube.com%2Fwatch%3Fv%3DTzofRTcoPsU&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-276"><span class="mw-cite-backlink"><b><a href="#cite_ref-276">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="https://voicebot.ai/2022/05/05/watch-aloe-blacc-perform-wake-me-up-in-3-languages-to-honor-avicii-using-respeecher-ai-translation/">"Watch Aloe Blacc Perform "Wake Me Up" in 3 Languages to Honor Avicii Using Respeecher AI Translation"</a>. <i>Voicebot.ai</i>. 5 May 2022. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220824080547/https://voicebot.ai/2022/05/05/watch-aloe-blacc-perform-wake-me-up-in-3-languages-to-honor-avicii-using-respeecher-ai-translation/">Archived</a> from the original on 24 August 2022<span class="reference-accessdate">. 
Retrieved <span class="nowrap">24 August</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Voicebot.ai&amp;rft.atitle=Watch+Aloe+Blacc+Perform+%22Wake+Me+Up%22+in+3+Languages+to+Honor+Avicii+Using+Respeecher+AI+Translation&amp;rft.date=2022-05-05&amp;rft_id=https%3A%2F%2Fvoicebot.ai%2F2022%2F05%2F05%2Fwatch-aloe-blacc-perform-wake-me-up-in-3-languages-to-honor-avicii-using-respeecher-ai-translation%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> <li id="cite_note-277"><span class="mw-cite-backlink"><b><a href="#cite_ref-277">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLees2023" class="citation web cs1">Lees, Dominic (27 January 2023). <a rel="nofollow" class="external text" href="https://theconversation.com/deep-fake-neighbour-wars-itvs-comedy-shows-how-ai-can-transform-popular-culture-198569">"Deep Fake Neighbour Wars: ITV's comedy shows how AI can transform popular culture"</a>. <i>The Conversation</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20230624110509/https://theconversation.com/deep-fake-neighbour-wars-itvs-comedy-shows-how-ai-can-transform-popular-culture-198569">Archived</a> from the original on 24 June 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">3 July</span> 2023</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+Conversation&amp;rft.atitle=Deep+Fake+Neighbour+Wars%3A+ITV%27s+comedy+shows+how+AI+can+transform+popular+culture&amp;rft.date=2023-01-27&amp;rft.aulast=Lees&amp;rft.aufirst=Dominic&amp;rft_id=http%3A%2F%2Ftheconversation.com%2Fdeep-fake-neighbour-wars-itvs-comedy-shows-how-ai-can-transform-popular-culture-198569&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></span> </li> </ol></div></div> <div class="mw-heading mw-heading2"><h2 id="Further_reading">Further reading</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=38" title="Edit section: Further reading"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <ul><li><a href="/wiki/Daniel_Immerwahr" title="Daniel Immerwahr">Daniel Immerwahr</a>, "Your Lying Eyes: People now use A.I. to generate fake videos indistinguishable from real ones. How much does it matter?", <i><a href="/wiki/The_New_Yorker" title="The New Yorker">The New Yorker</a></i>, 20 November 2023, pp.&#160;54–59. "If by 'deepfakes' we mean realistic videos produced using <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> that actually deceive people, then they barely exist. The fakes aren't deep, and the deeps aren't fake. [...] A.I.-generated videos are not, in general, operating in our media as counterfeited evidence. Their role better resembles that of <a href="/wiki/Cartoon" title="Cartoon">cartoons</a>, especially smutty ones." 
(p.&#160;59.)</li> <li><a href="/w/index.php?title=Emmanouil_Billis&amp;action=edit&amp;redlink=1" class="new" title="Emmanouil Billis (page does not exist)">Emmanouil Billis</a>, "<a rel="nofollow" class="external text" href="https://www.researchgate.net/publication/381187829">Deepfakes και Ποινικό Δίκαιο [Deepfakes and the Criminal Law]</a>" (in Greek). In: H. Satzger et al. (eds.), <a rel="nofollow" class="external text" href="https://www.sakkoulas.com/product/oria-kai-mellon-tou-poinikou-dikaiou/">The Limits and Future of Criminal Law - Essays in Honor of Christos Mylonopoulos</a>, Athens, P.N. Sakkoulas, 2024, pp.&#160;689–732.</li></ul> <div class="mw-heading mw-heading2"><h2 id="External_links">External links</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=Deepfake&amp;action=edit&amp;section=39" title="Edit section: External links"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><span class="noviewer" typeof="mw:File"><a href="/wiki/File:Commons-logo.svg" class="mw-file-description"><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/4/4a/Commons-logo.svg/12px-Commons-logo.svg.png" decoding="async" width="12" height="16" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/4/4a/Commons-logo.svg/18px-Commons-logo.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/4/4a/Commons-logo.svg/24px-Commons-logo.svg.png 2x" data-file-width="1024" data-file-height="1376" /></a></span> Media related to <a href="https://commons.wikimedia.org/wiki/Category:Deepfake" class="extiw" title="commons:Category:Deepfake">Deepfake</a> at Wikimedia Commons </p> <ul><li><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSasse2018" class="citation news cs1">Sasse, Ben (19 October 2018). 
<a rel="nofollow" class="external text" href="https://www.washingtonpost.com/opinions/the-real-scary-news-about-deepfakes/2018/10/19/6238c3ce-d176-11e8-83d6-291fcead2ab1_story.html">"This New Technology Could Send American Politics into a Tailspin"</a>. Opinions. <i><a href="/wiki/The_Washington_Post" title="The Washington Post">The Washington Post</a></i><span class="reference-accessdate">. Retrieved <span class="nowrap">10 July</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Washington+Post&amp;rft.atitle=This+New+Technology+Could+Send+American+Politics+into+a+Tailspin&amp;rft.date=2018-10-19&amp;rft.aulast=Sasse&amp;rft.aufirst=Ben&amp;rft_id=https%3A%2F%2Fwww.washingtonpost.com%2Fopinions%2Fthe-real-scary-news-about-deepfakes%2F2018%2F10%2F19%2F6238c3ce-d176-11e8-83d6-291fcead2ab1_story.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3ADeepfake" class="Z3988"></span></li> <li><a rel="nofollow" class="external text" href="https://www.asvspoof.org/">Fake/Spoof Audio Detection Challenge (ASVspoof)</a></li> <li><a rel="nofollow" class="external text" href="https://deepfakedetectionchallenge.ai/">Deepfake Detection Challenge (DFDC)</a></li> <li><a rel="nofollow" class="external text" href="https://deepfakes.virtuality.mit.edu/wp-content/uploads/2021/08/Media-Literacy-Bibliography.pdf">Bibliography: Media Literacy in the Age of Deepfakes</a>. 
Curated by Dr Joshua Glick.</li></ul> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><style data-mw-deduplicate="TemplateStyles:r1236075235">.mw-parser-output .navbox{box-sizing:border-box;border:1px solid #a2a9b1;width:100%;clear:both;font-size:88%;text-align:center;padding:1px;margin:1em auto 0}.mw-parser-output .navbox .navbox{margin-top:0}.mw-parser-output .navbox+.navbox,.mw-parser-output .navbox+.navbox-styles+.navbox{margin-top:-1px}.mw-parser-output .navbox-inner,.mw-parser-output .navbox-subgroup{width:100%}.mw-parser-output .navbox-group,.mw-parser-output .navbox-title,.mw-parser-output .navbox-abovebelow{padding:0.25em 1em;line-height:1.5em;text-align:center}.mw-parser-output .navbox-group{white-space:nowrap;text-align:right}.mw-parser-output .navbox,.mw-parser-output .navbox-subgroup{background-color:#fdfdfd}.mw-parser-output .navbox-list{line-height:1.5em;border-color:#fdfdfd}.mw-parser-output .navbox-list-with-group{text-align:left;border-left-width:2px;border-left-style:solid}.mw-parser-output tr+tr>.navbox-abovebelow,.mw-parser-output tr+tr>.navbox-group,.mw-parser-output tr+tr>.navbox-image,.mw-parser-output tr+tr>.navbox-list{border-top:2px solid #fdfdfd}.mw-parser-output .navbox-title{background-color:#ccf}.mw-parser-output .navbox-abovebelow,.mw-parser-output .navbox-group,.mw-parser-output .navbox-subgroup .navbox-title{background-color:#ddf}.mw-parser-output .navbox-subgroup .navbox-group,.mw-parser-output .navbox-subgroup .navbox-abovebelow{background-color:#e6e6ff}.mw-parser-output .navbox-even{background-color:#f7f7f7}.mw-parser-output .navbox-odd{background-color:transparent}.mw-parser-output .navbox .hlist td dl,.mw-parser-output .navbox .hlist td ol,.mw-parser-output .navbox .hlist td ul,.mw-parser-output .navbox td.hlist dl,.mw-parser-output .navbox td.hlist ol,.mw-parser-output .navbox td.hlist ul{padding:0.125em 0}.mw-parser-output .navbox 
.navbar{display:block;font-size:100%}.mw-parser-output .navbox-title .navbar{float:left;text-align:left;margin-right:0.5em}body.skin--responsive .mw-parser-output .navbox-image img{max-width:none!important}@media print{body.ns-0 .mw-parser-output .navbox{display:none!important}}</style></div><div role="navigation" class="navbox" aria-labelledby="Differentiable_computing" style="padding:3px"><table class="nowraplinks hlist mw-collapsible autocollapse navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239400231"><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Differentiable_computing" title="Template:Differentiable computing"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Differentiable_computing" title="Template talk:Differentiable computing"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Differentiable_computing" title="Special:EditPage/Template:Differentiable computing"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Differentiable_computing" style="font-size:114%;margin:0 4em">Differentiable computing</div></th></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Differentiable_function" title="Differentiable function">General</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><b><a href="/wiki/Differentiable_programming" title="Differentiable programming">Differentiable programming</a></b></li> <li><a href="/wiki/Information_geometry" title="Information geometry">Information geometry</a></li> <li><a href="/wiki/Statistical_manifold" 
title="Statistical manifold">Statistical manifold</a></li> <li><a href="/wiki/Automatic_differentiation" title="Automatic differentiation">Automatic differentiation</a></li> <li><a href="/wiki/Neuromorphic_computing" title="Neuromorphic computing">Neuromorphic computing</a></li> <li><a href="/wiki/Pattern_recognition" title="Pattern recognition">Pattern recognition</a></li> <li><a href="/wiki/Ricci_calculus" title="Ricci calculus">Ricci calculus</a></li> <li><a href="/wiki/Computational_learning_theory" title="Computational learning theory">Computational learning theory</a></li> <li><a href="/wiki/Inductive_bias" title="Inductive bias">Inductive bias</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Hardware</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Graphcore" title="Graphcore">IPU</a></li> <li><a href="/wiki/Tensor_Processing_Unit" title="Tensor Processing Unit">TPU</a></li> <li><a href="/wiki/Vision_processing_unit" title="Vision processing unit">VPU</a></li> <li><a href="/wiki/Memristor" title="Memristor">Memristor</a></li> <li><a href="/wiki/SpiNNaker" title="SpiNNaker">SpiNNaker</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Software libraries</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/TensorFlow" title="TensorFlow">TensorFlow</a></li> <li><a href="/wiki/PyTorch" title="PyTorch">PyTorch</a></li> <li><a href="/wiki/Keras" title="Keras">Keras</a></li> <li><a href="/wiki/Scikit-learn" title="Scikit-learn">scikit-learn</a></li> <li><a href="/wiki/Theano_(software)" title="Theano (software)">Theano</a></li> <li><a href="/wiki/Google_JAX" title="Google JAX">JAX</a></li> <li><a href="/wiki/Flux_(machine-learning_framework)" title="Flux (machine-learning framework)">Flux.jl</a></li> 
<li><a href="/wiki/MindSpore" title="MindSpore">MindSpore</a></li></ul> </div></td></tr><tr><td class="navbox-abovebelow" colspan="2"><div> <ul><li><span class="noviewer" typeof="mw:File"><a href="/wiki/File:Symbol_portal_class.svg" class="mw-file-description" title="Portal"><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/e/e2/Symbol_portal_class.svg/16px-Symbol_portal_class.svg.png" decoding="async" width="16" height="16" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/e/e2/Symbol_portal_class.svg/23px-Symbol_portal_class.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/e/e2/Symbol_portal_class.svg/31px-Symbol_portal_class.svg.png 2x" data-file-width="180" data-file-height="185" /></a></span> Portals <ul><li><a href="/wiki/Portal:Computer_programming" title="Portal:Computer programming">Computer programming</a></li> <li><a href="/wiki/Portal:Technology" title="Portal:Technology">Technology</a></li></ul></li></ul> </div></td></tr></tbody></table></div> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236075235"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"></div><div role="navigation" class="navbox" aria-labelledby="Media_manipulation" style="padding:3px"><table class="nowraplinks mw-collapsible mw-collapsed navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239400231"><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Media_manipulation" title="Template:Media manipulation"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a 
href="/wiki/Template_talk:Media_manipulation" title="Template talk:Media manipulation"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Media_manipulation" title="Special:EditPage/Template:Media manipulation"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Media_manipulation" style="font-size:114%;margin:0 4em"><a href="/wiki/Media_manipulation" title="Media manipulation">Media manipulation</a></div></th></tr><tr><th scope="row" class="navbox-group" style="width:1%">Context</th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Media_bias" title="Media bias">Bias</a></li> <li><a href="/wiki/Crowd_psychology" title="Crowd psychology">Crowd psychology</a></li> <li><a href="/wiki/Deception" title="Deception">Deception</a></li> <li><a href="/wiki/Dumbing_down" title="Dumbing down">Dumbing down</a></li> <li><a href="/wiki/False_balance" title="False balance">False balance</a></li> <li><a href="/wiki/Half-truth" title="Half-truth">Half-truths</a></li> <li><a href="/wiki/Mass_media" title="Mass media">Media</a></li> <li><a href="/wiki/Obfuscation" title="Obfuscation">Obfuscation</a></li> <li><a href="/wiki/Orwellian" title="Orwellian">Orwellian</a></li> <li><a href="/wiki/Persuasion" title="Persuasion">Persuasion</a></li> <li><a href="/wiki/Manipulation_(psychology)" title="Manipulation (psychology)">Manipulation (psychology)</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Activism" title="Activism">Activism</a></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Advocacy" title="Advocacy">Advocacy</a> <ul><li><a href="/wiki/Advocacy_group" title="Advocacy group">group</a></li></ul></li> <li><a href="/wiki/Alternative_media" 
title="Alternative media">Alternative media</a></li> <li><a href="/wiki/Boycott" title="Boycott">Boycott</a></li> <li><a href="/wiki/Call-out_culture" class="mw-redirect" title="Call-out culture">Call-out culture</a></li> <li><a href="/wiki/Cancel_culture" title="Cancel culture">Cancel culture</a></li> <li><a href="/wiki/Civil_disobedience" title="Civil disobedience">Civil disobedience</a></li> <li><a href="/wiki/Culture_jamming" title="Culture jamming">Culture jamming</a></li> <li><a href="/wiki/Political_demonstration" title="Political demonstration">Demonstrations</a></li> <li><a href="/wiki/Deplatforming" title="Deplatforming">Deplatforming</a></li> <li><a href="/wiki/Grassroots" title="Grassroots">Grassroots</a></li> <li><a href="/wiki/Guerrilla_communication" title="Guerrilla communication">Guerrilla communication</a></li> <li><a href="/wiki/Hacktivism" title="Hacktivism">Hacktivism</a></li> <li><a href="/wiki/Internet_activism" title="Internet activism">Internet</a></li> <li><a href="/wiki/Media_activism" title="Media activism">Media</a></li> <li><a href="/wiki/Occupation_(protest)" title="Occupation (protest)">Occupations</a></li> <li><a href="/wiki/Petition" title="Petition">Petitions</a></li> <li><a href="/wiki/Protest" title="Protest">Protests</a></li> <li><a href="/wiki/Youth_activism" title="Youth activism">Youth</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Advertising" title="Advertising">Advertising</a></th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Billboard" title="Billboard">Billboards</a></li> <li><a href="/wiki/False_advertising" title="False advertising">False</a></li> <li><a href="/wiki/Infomercial" title="Infomercial">Infomercials</a></li> <li><a href="/wiki/Mobile_marketing" title="Mobile marketing">Mobiles</a></li> <li><a href="/wiki/Promotional_model" title="Promotional 
model">Modeling</a></li> <li><a href="/wiki/Radio_advertisement" title="Radio advertisement">Radio</a></li> <li><a href="/wiki/Sex_in_advertising" title="Sex in advertising">Sex</a></li> <li><a href="/wiki/Advertising_slogan" title="Advertising slogan">Slogans</a></li> <li><a href="/wiki/Testimonial" title="Testimonial">Testimonials</a></li> <li><a href="/wiki/Television_advertisement" title="Television advertisement">TV</a></li> <li><a href="/wiki/Criticism_of_advertising" title="Criticism of advertising">Criticism of advertising</a></li> <li><a href="/wiki/Annoyance_factor" title="Annoyance factor">Annoyance factor</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><div class="hlist"><ul><li><a href="/wiki/Censorship" title="Censorship">Censorship</a></li><li><a href="/wiki/Media_regulation" class="mw-redirect" title="Media regulation">Media regulation</a></li></ul></div></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Book_censorship" title="Book censorship">Books</a></li> <li><a href="/wiki/Broadcast_law" title="Broadcast law">Broadcast law</a></li> <li><a href="/wiki/Burning_of_books_and_burying_of_scholars" title="Burning of books and burying of scholars">Burying of scholars</a></li> <li><a href="/wiki/Catch_and_kill" title="Catch and kill">Catch and kill</a></li> <li><a href="/wiki/Corporate_censorship" title="Corporate censorship">Corporate</a></li> <li><a href="/wiki/Cover-up" title="Cover-up">Cover-ups</a></li> <li><a href="/wiki/Euphemism" title="Euphemism">Euphemism</a></li> <li><a href="/wiki/Film_censorship" title="Film censorship">Films</a></li> <li><a href="/wiki/Historical_negationism" title="Historical negationism">Historical negationism</a></li> <li><a href="/wiki/Internet_censorship" title="Internet censorship">Internet</a></li> <li><a href="/wiki/Political_censorship" title="Political 
censorship">Political</a></li> <li><a href="/wiki/Religious_censorship" title="Religious censorship">Religious</a></li> <li><a href="/wiki/Self-censorship" title="Self-censorship">Self</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Hoax" title="Hoax">Hoaxing</a></th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Alternative_facts" title="Alternative facts">Alternative facts</a></li> <li><a href="/wiki/April_Fools%27_Day" title="April Fools&#39; Day">April Fools'</a></li> <li><a class="mw-selflink selflink">Deepfake</a></li> <li><a href="/wiki/Fake_news" title="Fake news">Fake news</a> <ul><li><a href="/wiki/Fake_news_website" title="Fake news website">websites</a></li></ul></li> <li><a href="/wiki/Fakelore" class="mw-redirect" title="Fakelore">Fakelore</a></li> <li><a href="/wiki/False_document" title="False document">False document</a></li> <li><a href="/wiki/Fictitious_entry" title="Fictitious entry">Fictitious entries</a></li> <li><a href="/wiki/Firehose_of_falsehood" title="Firehose of falsehood">Firehose of falsehood</a></li> <li><a href="/wiki/Forgery" title="Forgery">Forgery</a></li> <li><a href="/wiki/Gaslighting" title="Gaslighting">Gaslighting</a></li> <li><a href="/wiki/List_of_hoaxes" title="List of hoaxes">List</a></li> <li><a href="/wiki/Literary_forgery" title="Literary forgery">Literary</a></li> <li><a href="/wiki/Lying_press" title="Lying press">Lying press</a></li> <li><a href="/wiki/Photograph_manipulation" title="Photograph manipulation">Photograph manipulation</a></li> <li><a href="/wiki/Racial_hoax" title="Racial hoax">Racial</a></li> <li><a href="/wiki/Urban_legends_and_myths" class="mw-redirect" title="Urban legends and myths">Urban legend</a></li> <li><a href="/wiki/Virus_hoax" title="Virus hoax">Virus</a></li> <li><a href="/wiki/Video_manipulation" title="Video manipulation">Video 
manipulation</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Marketing" title="Marketing">Marketing</a></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Brand" title="Brand">Branding</a></li> <li><a href="/wiki/Loyalty_marketing" title="Loyalty marketing">Loyalty</a></li> <li><a href="/wiki/Product_marketing" title="Product marketing">Product</a></li> <li><a href="/wiki/Product_placement" title="Product placement">Product placement</a></li> <li><a href="/wiki/Publicity" title="Publicity">Publicity</a></li> <li><a href="/wiki/Market_research" title="Market research">Research</a></li> <li><a href="/wiki/Word-of-mouth_marketing" title="Word-of-mouth marketing">Word of mouth</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/News_media" title="News media">News media</a></th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Agenda-setting_theory" title="Agenda-setting theory">Agenda-setting</a></li> <li><a href="/wiki/News_broadcasting" title="News broadcasting">Broadcasting</a></li> <li><a href="/wiki/Media_circus" title="Media circus">Circus</a></li> <li><a href="/wiki/24-hour_news_cycle" title="24-hour news cycle">Cycle</a></li> <li><a href="/wiki/Emotive_conjugation" title="Emotive conjugation">Emotive conjugation</a></li> <li><a href="/wiki/False_balance" title="False balance">False balance</a></li> <li><a href="/wiki/Infotainment" title="Infotainment">Infotainment</a></li> <li><a href="/wiki/Managing_the_news" title="Managing the news">Managing</a></li> <li><a href="/wiki/Narcotizing_dysfunction" title="Narcotizing dysfunction">Narcotizing dysfunction</a></li> <li><a href="/wiki/Newspeak" title="Newspeak">Newspeak</a></li> <li><a href="/wiki/Media_event" 
title="Media event">Pseudo-event</a></li> <li><a href="/wiki/Media_scrum" title="Media scrum">Scrum</a></li> <li><a href="/wiki/Sensationalism" title="Sensationalism">Sensationalism</a></li> <li><a href="/wiki/Tabloid_journalism" title="Tabloid journalism">Tabloid journalism</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Political_campaign" title="Political campaign">Political campaigning</a></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Campaign_advertising" title="Campaign advertising">Advertising</a></li> <li><a href="/wiki/Astroturfing" title="Astroturfing">Astroturfing</a></li> <li><a href="/wiki/Attack_ad" title="Attack ad">Attack ad</a></li> <li><a href="/wiki/Canvassing" title="Canvassing">Canvassing</a></li> <li><a href="/wiki/Character_assassination" title="Character assassination">Character assassination</a></li> <li><a href="/wiki/Dog_whistle_(politics)" title="Dog whistle (politics)">Dog whistle</a></li> <li><a href="/wiki/Election_promise" title="Election promise">Election promises</a></li> <li><a href="/wiki/Lawn_sign" title="Lawn sign">Lawn signs</a></li> <li><a href="/wiki/Party_platform" title="Party platform">Party platforms (or manifestos)</a></li> <li><a href="/wiki/Name_recognition" title="Name recognition">Name recognition</a></li> <li><a href="/wiki/Negative_campaigning" title="Negative campaigning">Negative</a></li> <li><a href="/wiki/Push_poll" title="Push poll">Push polling</a></li> <li><a href="/wiki/Smear_campaign" title="Smear campaign">Smear campaign</a></li> <li><a href="/wiki/Wedge_issue" title="Wedge issue">Wedge issue</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Propaganda" title="Propaganda">Propaganda</a></th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div 
style="padding:0 0.25em"> <ul><li><a href="/wiki/Bandwagon_effect" title="Bandwagon effect">Bandwagon</a></li> <li><a href="/wiki/Big_lie" title="Big lie">Big lie</a></li> <li><a href="/wiki/Crowd_manipulation" title="Crowd manipulation">Crowd manipulation</a></li> <li><a href="/wiki/Disinformation" title="Disinformation">Disinformation</a></li> <li><a href="/wiki/Fearmongering" title="Fearmongering">Fearmongering</a></li> <li><a href="/wiki/Framing_(social_sciences)" title="Framing (social sciences)">Framing</a></li> <li><a href="/wiki/Indoctrination" title="Indoctrination">Indoctrination</a></li> <li><a href="/wiki/Loaded_language" title="Loaded language">Loaded language</a></li> <li><a href="/wiki/National_myth" title="National myth">National mythology</a></li> <li><a href="/wiki/Rally_%27round_the_flag_effect" title="Rally &#39;round the flag effect">Rally 'round the flag effect</a></li> <li><a href="/wiki/Propaganda_techniques" title="Propaganda techniques">Techniques</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Psychological_warfare" title="Psychological warfare">Psychological warfare</a></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Airborne_leaflet_propaganda" title="Airborne leaflet propaganda">Airborne leaflets</a></li> <li><a href="/wiki/False_flag" title="False flag">False flag</a></li> <li><a href="/wiki/Fifth_column" title="Fifth column">Fifth column</a></li> <li><a href="/wiki/Information_warfare" title="Information warfare">Information (IT)</a></li> <li><a href="/wiki/Lawfare" title="Lawfare">Lawfare</a></li> <li><a href="/wiki/Political_warfare" title="Political warfare">Political</a></li> <li><a href="/wiki/Public_diplomacy" title="Public diplomacy">Public diplomacy</a></li> <li><a href="/wiki/Sedition" title="Sedition">Sedition</a></li> <li><a href="/wiki/Subversion" 
title="Subversion">Subversion</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Public_relations" title="Public relations">Public relations</a></th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Cult_of_personality" title="Cult of personality">Cult of personality</a></li> <li><a href="/wiki/Doublespeak" title="Doublespeak">Doublespeak</a></li> <li><a href="/wiki/Non-apology_apology" title="Non-apology apology">Non-apology apology</a></li> <li><a href="/wiki/Reputation_management" title="Reputation management">Reputation management</a></li> <li><a href="/wiki/Slogan" title="Slogan">Slogans</a></li> <li><a href="/wiki/Sound_bite" title="Sound bite">Sound bites</a></li> <li><a href="/wiki/Spin_(propaganda)" title="Spin (propaganda)">Spin</a></li> <li><a href="/wiki/Transfer_(propaganda)" title="Transfer (propaganda)">Transfer</a></li> <li><a href="/wiki/Understatement" title="Understatement">Understatement</a></li> <li><a href="/wiki/Weasel_word" title="Weasel word">Weasel words</a></li> <li><a href="/wiki/Corporate_propaganda" title="Corporate propaganda">Corporate propaganda</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Sales" title="Sales">Sales</a></th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Cold_calling" title="Cold calling">Cold calling</a></li> <li><a href="/wiki/Door-to-door" title="Door-to-door">Door-to-door</a></li> <li><a href="/wiki/Pricing" title="Pricing">Pricing</a></li> <li><a href="/wiki/Product_demonstration" title="Product demonstration">Product demonstrations</a></li> <li><a href="/wiki/Sales_promotion" title="Sales promotion">Promotion</a> <ul><li><a href="/wiki/Spaving" title="Spaving">Spaving</a></li></ul></li> <li><a 
href="/wiki/Promotional_merchandise" title="Promotional merchandise">Promotional merchandise</a></li> <li><a href="/wiki/Telemarketing" title="Telemarketing">Telemarketing</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Related</th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Influence-for-hire" title="Influence-for-hire">Influence-for-hire</a></li> <li><a href="/wiki/Media_bias" title="Media bias">Media bias</a> <ul><li><a href="/wiki/Media_bias_in_the_United_States" title="Media bias in the United States">United States</a></li></ul></li> <li><a href="/wiki/Concentration_of_media_ownership" title="Concentration of media ownership">Media concentration</a></li> <li><a href="/wiki/Media_democracy" title="Media democracy">Media democracy</a></li> <li><a href="/wiki/Media_ecology" title="Media ecology">Media ecology</a></li> <li><a href="/wiki/Media_ethics" title="Media ethics">Media ethics</a></li> <li><a href="/wiki/Media_franchise" title="Media franchise">Media franchise</a></li> <li><a href="/wiki/Influence_of_mass_media" title="Influence of mass media">Media influence</a></li> <li><a href="/wiki/Media_proprietor" title="Media proprietor">Media proprietor</a></li></ul> </div></td></tr></tbody></table></div> <p class="mw-empty-elt"> </p> <!-- NewPP limit report Parsed by mw‐web.codfw.main‐5c59558b9d‐b4csx Cached time: 20241130110210 Cache expiry: 2592000 Reduced expiry: false Complications: [vary‐revision‐sha1, show‐toc] CPU time usage: 2.594 seconds Real time usage: 2.869 seconds Preprocessor visited node count: 15647/1000000 Post‐expand include size: 632166/2097152 bytes Template argument size: 4287/2097152 bytes Highest expansion depth: 12/100 Expensive parser function count: 10/500 Unstrip recursion depth: 1/20 Unstrip post‐expand size: 1026497/5000000 bytes Lua time usage: 1.697/10.000 seconds Lua memory usage: 
21540518/52428800 bytes Lua Profile: MediaWiki\Extension\Scribunto\Engines\LuaSandbox\LuaSandboxCallback::callParserFunction 280 ms 15.6% MediaWiki\Extension\Scribunto\Engines\LuaSandbox\LuaSandboxCallback::match 260 ms 14.4% MediaWiki\Extension\Scribunto\Engines\LuaSandbox\LuaSandboxCallback::gsub 220 ms 12.2% ? 160 ms 8.9% dataWrapper <mw.lua:672> 160 ms 8.9% recursiveClone <mwInit.lua:45> 80 ms 4.4% MediaWiki\Extension\Scribunto\Engines\LuaSandbox\LuaSandboxCallback::find 80 ms 4.4% (for generator) 80 ms 4.4% MediaWiki\Extension\Scribunto\Engines\LuaSandbox\LuaSandboxCallback::getEntityStatements 60 ms 3.3% chunk <Module:Citation/CS1> 60 ms 3.3% [others] 360 ms 20.0% Number of Wikibase entities loaded: 4/400 --> <!-- Transclusion expansion time report (%,ms,calls,template) 100.00% 2495.476 1 -total 68.69% 1714.137 1 Template:Reflist 26.14% 652.235 140 Template:Cite_web 17.05% 425.590 66 Template:Cite_news 6.94% 173.168 29 Template:Cite_journal 4.99% 124.510 1 Template:Cite_Q 4.78% 119.336 1 Template:Nihongo 4.01% 100.160 1 Template:Artificial_intelligence 3.87% 96.684 1 Template:Sidebar_with_collapsible_lists 3.53% 88.074 1 Template:Short_description --> <!-- Saved in parser cache with key enwiki:pcache:idhash:56641018-0!canonical and timestamp 20241130110210 and revision id 1258994312. 
Rendering was triggered because: page-view --> </div><!--esi <esi:include src="/esitest-fa8a495983347898/content" /> --><noscript><img src="https://login.wikimedia.org/wiki/Special:CentralAutoLogin/start?type=1x1&amp;useformat=desktop" alt="" width="1" height="1" style="border: none; position: absolute;"></noscript> <div class="printfooter" data-nosnippet="">Retrieved from "<a dir="ltr" href="https://en.wikipedia.org/w/index.php?title=Deepfake&amp;oldid=1258994312">https://en.wikipedia.org/w/index.php?title=Deepfake&amp;oldid=1258994312</a>"</div></div> <div id="catlinks" class="catlinks" data-mw="interface"><div id="mw-normal-catlinks" class="mw-normal-catlinks"><a href="/wiki/Help:Category" title="Help:Category">Categories</a>: <ul><li><a href="/wiki/Category:Deepfakes" title="Category:Deepfakes">Deepfakes</a></li><li><a href="/wiki/Category:2018_neologisms" title="Category:2018 neologisms">2018 neologisms</a></li><li><a href="/wiki/Category:Generative_artificial_intelligence" title="Category:Generative artificial intelligence">Generative artificial intelligence</a></li><li><a href="/wiki/Category:AI_safety" title="Category:AI safety">AI safety</a></li><li><a href="/wiki/Category:Internet_memes_introduced_in_2020" title="Category:Internet memes introduced in 2020">Internet memes introduced in 2020</a></li><li><a href="/wiki/Category:Internet_memes" title="Category:Internet memes">Internet memes</a></li><li><a href="/wiki/Category:Media_studies" title="Category:Media studies">Media studies</a></li><li><a href="/wiki/Category:Text-to-image_generation" title="Category:Text-to-image generation">Text-to-image generation</a></li><li><a href="/wiki/Category:Text-to-video_generation" title="Category:Text-to-video generation">Text-to-video generation</a></li></ul></div><div id="mw-hidden-catlinks" class="mw-hidden-catlinks mw-hidden-cats-hidden">Hidden categories: <ul><li><a href="/wiki/Category:Webarchive_template_wayback_links" title="Category:Webarchive template 
wayback links">Webarchive template wayback links</a></li><li><a href="/wiki/Category:CS1_French-language_sources_(fr)" title="Category:CS1 French-language sources (fr)">CS1 French-language sources (fr)</a></li><li><a href="/wiki/Category:CS1_German-language_sources_(de)" title="Category:CS1 German-language sources (de)">CS1 German-language sources (de)</a></li><li><a href="/wiki/Category:CS1_Austrian_German-language_sources_(de-at)" title="Category:CS1 Austrian German-language sources (de-at)">CS1 Austrian German-language sources (de-at)</a></li><li><a href="/wiki/Category:CS1_maint:_multiple_names:_authors_list" title="Category:CS1 maint: multiple names: authors list">CS1 maint: multiple names: authors list</a></li><li><a href="/wiki/Category:Articles_with_short_description" title="Category:Articles with short description">Articles with short description</a></li><li><a href="/wiki/Category:Short_description_is_different_from_Wikidata" title="Category:Short description is different from Wikidata">Short description is different from Wikidata</a></li><li><a href="/wiki/Category:Articles_that_may_be_too_long_from_November_2024" title="Category:Articles that may be too long from November 2024">Articles that may be too long from November 2024</a></li><li><a href="/wiki/Category:All_articles_with_unsourced_statements" title="Category:All articles with unsourced statements">All articles with unsourced statements</a></li><li><a href="/wiki/Category:Articles_with_unsourced_statements_from_June_2024" title="Category:Articles with unsourced statements from June 2024">Articles with unsourced statements from June 2024</a></li><li><a href="/wiki/Category:Articles_containing_Japanese-language_text" title="Category:Articles containing Japanese-language text">Articles containing Japanese-language text</a></li><li><a href="/wiki/Category:Articles_with_unsourced_statements_from_September_2023" title="Category:Articles with unsourced statements from September 2023">Articles with 
unsourced statements from September 2023</a></li><li><a href="/wiki/Category:Articles_with_limited_geographic_scope_from_November_2021" title="Category:Articles with limited geographic scope from November 2021">Articles with limited geographic scope from November 2021</a></li><li><a href="/wiki/Category:Articles_with_trivia_sections_from_November_2024" title="Category:Articles with trivia sections from November 2024">Articles with trivia sections from November 2024</a></li><li><a href="/wiki/Category:Commons_category_link_from_Wikidata" title="Category:Commons category link from Wikidata">Commons category link from Wikidata</a></li><li><a href="/wiki/Category:Use_British_English_from_April_2023" title="Category:Use British English from April 2023">Use British English from April 2023</a></li><li><a href="/wiki/Category:Use_dmy_dates_from_July_2020" title="Category:Use dmy dates from July 2020">Use dmy dates from July 2020</a></li><li><a href="/wiki/Category:Articles_containing_video_clips" title="Category:Articles containing video clips">Articles containing video clips</a></li></ul></div></div> </div> </main> </div> <div class="mw-footer-container"> <footer id="footer" class="mw-footer" > <ul id="footer-info"> <li id="footer-info-lastmod"> This page was last edited on 22 November 2024, at 20:15<span class="anonymous-show">&#160;(UTC)</span>.</li> <li id="footer-info-copyright">Text is available under the <a href="/wiki/Wikipedia:Text_of_the_Creative_Commons_Attribution-ShareAlike_4.0_International_License" title="Wikipedia:Text of the Creative Commons Attribution-ShareAlike 4.0 International License">Creative Commons Attribution-ShareAlike 4.0 License</a>; additional terms may apply. 
By using this site, you agree to the <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Terms_of_Use" class="extiw" title="foundation:Special:MyLanguage/Policy:Terms of Use">Terms of Use</a> and <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy" class="extiw" title="foundation:Special:MyLanguage/Policy:Privacy policy">Privacy Policy</a>. Wikipedia® is a registered trademark of the <a rel="nofollow" class="external text" href="https://wikimediafoundation.org/">Wikimedia Foundation, Inc.</a>, a non-profit organization.</li> </ul> <ul id="footer-places"> <li id="footer-places-privacy"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy">Privacy policy</a></li> <li id="footer-places-about"><a href="/wiki/Wikipedia:About">About Wikipedia</a></li> <li id="footer-places-disclaimers"><a href="/wiki/Wikipedia:General_disclaimer">Disclaimers</a></li> <li id="footer-places-contact"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us">Contact Wikipedia</a></li> <li id="footer-places-wm-codeofconduct"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Universal_Code_of_Conduct">Code of Conduct</a></li> <li id="footer-places-developers"><a href="https://developer.wikimedia.org">Developers</a></li> <li id="footer-places-statslink"><a href="https://stats.wikimedia.org/#/en.wikipedia.org">Statistics</a></li> <li id="footer-places-cookiestatement"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Cookie_statement">Cookie statement</a></li> <li id="footer-places-mobileview"><a href="//en.m.wikipedia.org/w/index.php?title=Deepfake&amp;mobileaction=toggle_view_mobile" class="noprint stopMobileRedirectToggle">Mobile view</a></li> </ul> <ul id="footer-icons" class="noprint"> <li id="footer-copyrightico"><a href="https://wikimediafoundation.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><img 
src="/static/images/footer/wikimedia-button.svg" width="84" height="29" alt="Wikimedia Foundation" loading="lazy"></a></li> <li id="footer-poweredbyico"><a href="https://www.mediawiki.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><img src="/w/resources/assets/poweredby_mediawiki.svg" alt="Powered by MediaWiki" width="88" height="31" loading="lazy"></a></li> </ul> </footer> </div> </div> </div> <div class="vector-settings" id="p-dock-bottom"> <ul></ul> </div><script>(RLQ=window.RLQ||[]).push(function(){mw.config.set({"wgHostname":"mw-web.codfw.main-5c59558b9d-lgdvq","wgBackendResponseTime":159,"wgPageParseReport":{"limitreport":{"cputime":"2.594","walltime":"2.869","ppvisitednodes":{"value":15647,"limit":1000000},"postexpandincludesize":{"value":632166,"limit":2097152},"templateargumentsize":{"value":4287,"limit":2097152},"expansiondepth":{"value":12,"limit":100},"expensivefunctioncount":{"value":10,"limit":500},"unstrip-depth":{"value":1,"limit":20},"unstrip-size":{"value":1026497,"limit":5000000},"entityaccesscount":{"value":4,"limit":400},"timingprofile":["100.00% 2495.476 1 -total"," 68.69% 1714.137 1 Template:Reflist"," 26.14% 652.235 140 Template:Cite_web"," 17.05% 425.590 66 Template:Cite_news"," 6.94% 173.168 29 Template:Cite_journal"," 4.99% 124.510 1 Template:Cite_Q"," 4.78% 119.336 1 Template:Nihongo"," 4.01% 100.160 1 Template:Artificial_intelligence"," 3.87% 96.684 1 Template:Sidebar_with_collapsible_lists"," 3.53% 88.074 1 
Template:Short_description"]},"scribunto":{"limitreport-timeusage":{"value":"1.697","limit":"10.000"},"limitreport-memusage":{"value":21540518,"limit":52428800},"limitreport-profile":[["MediaWiki\\Extension\\Scribunto\\Engines\\LuaSandbox\\LuaSandboxCallback::callParserFunction","280","15.6"],["MediaWiki\\Extension\\Scribunto\\Engines\\LuaSandbox\\LuaSandboxCallback::match","260","14.4"],["MediaWiki\\Extension\\Scribunto\\Engines\\LuaSandbox\\LuaSandboxCallback::gsub","220","12.2"],["?","160","8.9"],["dataWrapper \u003Cmw.lua:672\u003E","160","8.9"],["recursiveClone \u003CmwInit.lua:45\u003E","80","4.4"],["MediaWiki\\Extension\\Scribunto\\Engines\\LuaSandbox\\LuaSandboxCallback::find","80","4.4"],["(for generator)","80","4.4"],["MediaWiki\\Extension\\Scribunto\\Engines\\LuaSandbox\\LuaSandboxCallback::getEntityStatements","60","3.3"],["chunk \u003CModule:Citation/CS1\u003E","60","3.3"],["[others]","360","20.0"]]},"cachereport":{"origin":"mw-web.codfw.main-5c59558b9d-b4csx","timestamp":"20241130110210","ttl":2592000,"transientcontent":false}}});});</script> <script type="application/ld+json">{"@context":"https:\/\/schema.org","@type":"Article","name":"Deepfake","url":"https:\/\/en.wikipedia.org\/wiki\/Deepfake","sameAs":"http:\/\/www.wikidata.org\/entity\/Q49473179","mainEntity":"http:\/\/www.wikidata.org\/entity\/Q49473179","author":{"@type":"Organization","name":"Contributors to Wikimedia projects"},"publisher":{"@type":"Organization","name":"Wikimedia Foundation, Inc.","logo":{"@type":"ImageObject","url":"https:\/\/www.wikimedia.org\/static\/images\/wmf-hor-googpub.png"}},"datePublished":"2018-02-20T17:16:56Z","dateModified":"2024-11-22T20:15:01Z","image":"https:\/\/upload.wikimedia.org\/wikipedia\/commons\/9\/9c\/Dictators_-_Kim_Jong-Un_by_RepresentUs.webm","headline":"artificial intelligence-based human image synthesis technique"}</script> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10