<!-- Archived snapshot: "AI takeover - Wikipedia" (saved via cinxe.com).
     Scraper banner text moved into this comment so the <!DOCTYPE html>
     below is the first non-comment content and the page renders in
     standards mode rather than quirks mode. -->

<!DOCTYPE html> <html class="client-nojs vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-sticky-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-toc-available" lang="en" dir="ltr"> <head> <meta charset="UTF-8"> <title>AI takeover - Wikipedia</title> <script>(function(){var className="client-js vector-feature-language-in-header-enabled vector-feature-language-in-main-page-header-disabled vector-feature-sticky-header-disabled vector-feature-page-tools-pinned-disabled vector-feature-toc-pinned-clientpref-1 vector-feature-main-menu-pinned-disabled vector-feature-limited-width-clientpref-1 vector-feature-limited-width-content-enabled vector-feature-custom-font-size-clientpref-1 vector-feature-appearance-pinned-clientpref-1 vector-feature-night-mode-enabled skin-theme-clientpref-day vector-toc-available";var cookie=document.cookie.match(/(?:^|; )enwikimwclientpreferences=([^;]+)/);if(cookie){cookie[1].split('%2C').forEach(function(pref){className=className.replace(new RegExp('(^| )'+pref.replace(/-clientpref-\w+$|[^\w-]+/g,'')+'-clientpref-\\w+( |$)'),'$1'+pref+'$2');});}document.documentElement.className=className;}());RLCONF={"wgBreakFrames":false,"wgSeparatorTransformTable":["",""],"wgDigitTransformTable":["",""],"wgDefaultDateFormat":"dmy", "wgMonthNames":["","January","February","March","April","May","June","July","August","September","October","November","December"],"wgRequestId":"e7267447-2de8-427d-81dc-87eeccee1f81","wgCanonicalNamespace":"","wgCanonicalSpecialPageName":false,"wgNamespaceNumber":0,"wgPageName":"AI_takeover","wgTitle":"AI 
takeover","wgCurRevisionId":1259605660,"wgRevisionId":1259605660,"wgArticleId":813176,"wgIsArticle":true,"wgIsRedirect":false,"wgAction":"view","wgUserName":null,"wgUserGroups":["*"],"wgCategories":["Webarchive template wayback links","Articles with short description","Short description is different from Wikidata","Articles with excerpts","Doomsday scenarios","Future problems","Science fiction themes","Existential risk from artificial general intelligence","Technophobia"],"wgPageViewLanguage":"en","wgPageContentLanguage":"en","wgPageContentModel":"wikitext","wgRelevantPageName":"AI_takeover","wgRelevantArticleId":813176,"wgIsProbablyEditable":true,"wgRelevantPageIsProbablyEditable": true,"wgRestrictionEdit":[],"wgRestrictionMove":[],"wgNoticeProject":"wikipedia","wgCiteReferencePreviewsActive":false,"wgFlaggedRevsParams":{"tags":{"status":{"levels":1}}},"wgMediaViewerOnClick":true,"wgMediaViewerEnabledByDefault":true,"wgPopupsFlags":0,"wgVisualEditor":{"pageLanguageCode":"en","pageLanguageDir":"ltr","pageVariantFallbacks":"en"},"wgMFDisplayWikibaseDescriptions":{"search":true,"watchlist":true,"tagline":false,"nearby":true},"wgWMESchemaEditAttemptStepOversample":false,"wgWMEPageLength":40000,"wgRelatedArticlesCompat":[],"wgCentralAuthMobileDomain":false,"wgEditSubmitButtonLabelPublish":true,"wgULSPosition":"interlanguage","wgULSisCompactLinksEnabled":false,"wgVector2022LanguageInHeader":true,"wgULSisLanguageSelectorEmpty":false,"wgWikibaseItemId":"Q2254427","wgCheckUserClientHintsHeadersJsApi":["brands","architecture","bitness","fullVersionList","mobile","model","platform","platformVersion"],"GEHomepageSuggestedEditsEnableTopics":true,"wgGETopicsMatchModeEnabled": 
false,"wgGEStructuredTaskRejectionReasonTextInputEnabled":false,"wgGELevelingUpEnabledForUser":false};RLSTATE={"ext.globalCssJs.user.styles":"ready","site.styles":"ready","user.styles":"ready","ext.globalCssJs.user":"ready","user":"ready","user.options":"loading","ext.cite.styles":"ready","skins.vector.search.codex.styles":"ready","skins.vector.styles":"ready","skins.vector.icons":"ready","jquery.makeCollapsible.styles":"ready","ext.wikimediamessages.styles":"ready","ext.visualEditor.desktopArticleTarget.noscript":"ready","ext.uls.interlanguage":"ready","wikibase.client.init":"ready","ext.wikimediaBadges":"ready"};RLPAGEMODULES=["ext.cite.ux-enhancements","mediawiki.page.media","ext.scribunto.logs","site","mediawiki.page.ready","jquery.makeCollapsible","mediawiki.toc","skins.vector.js","ext.centralNotice.geoIP","ext.centralNotice.startUp","ext.gadget.ReferenceTooltips","ext.gadget.switcher","ext.urlShortener.toolbar","ext.centralauth.centralautologin","mmv.bootstrap","ext.popups", "ext.visualEditor.desktopArticleTarget.init","ext.visualEditor.targetLoader","ext.echo.centralauth","ext.eventLogging","ext.wikimediaEvents","ext.navigationTiming","ext.uls.interface","ext.cx.eventlogging.campaigns","ext.cx.uls.quick.actions","wikibase.client.vector-2022","ext.checkUser.clientHints","ext.quicksurveys.init","ext.growthExperiments.SuggestedEditSession","wikibase.sidebar.tracking"];</script> <script>(RLQ=window.RLQ||[]).push(function(){mw.loader.impl(function(){return["user.options@12s5i",function($,jQuery,require,module){mw.user.tokens.set({"patrolToken":"+\\","watchToken":"+\\","csrfToken":"+\\"}); }];});});</script> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=ext.cite.styles%7Cext.uls.interlanguage%7Cext.visualEditor.desktopArticleTarget.noscript%7Cext.wikimediaBadges%7Cext.wikimediamessages.styles%7Cjquery.makeCollapsible.styles%7Cskins.vector.icons%2Cstyles%7Cskins.vector.search.codex.styles%7Cwikibase.client.init&amp;only=styles&amp;skin=vector-2022"> 
<script async="" src="/w/load.php?lang=en&amp;modules=startup&amp;only=scripts&amp;raw=1&amp;skin=vector-2022"></script> <meta name="ResourceLoaderDynamicStyles" content=""> <link rel="stylesheet" href="/w/load.php?lang=en&amp;modules=site.styles&amp;only=styles&amp;skin=vector-2022"> <meta name="generator" content="MediaWiki 1.44.0-wmf.4"> <meta name="referrer" content="origin"> <meta name="referrer" content="origin-when-cross-origin"> <meta name="robots" content="max-image-preview:standard"> <meta name="format-detection" content="telephone=no"> <meta property="og:image" content="https://upload.wikimedia.org/wikipedia/commons/d/d9/Capek_RUR.jpg"> <meta property="og:image:width" content="1200"> <meta property="og:image:height" content="937"> <meta property="og:image" content="https://upload.wikimedia.org/wikipedia/commons/d/d9/Capek_RUR.jpg"> <meta property="og:image:width" content="800"> <meta property="og:image:height" content="625"> <meta property="og:image:width" content="640"> <meta property="og:image:height" content="500"> <meta name="viewport" content="width=1120"> <meta property="og:title" content="AI takeover - Wikipedia"> <meta property="og:type" content="website"> <link rel="preconnect" href="//upload.wikimedia.org"> <link rel="alternate" media="only screen and (max-width: 640px)" href="//en.m.wikipedia.org/wiki/AI_takeover"> <link rel="alternate" type="application/x-wiki" title="Edit this page" href="/w/index.php?title=AI_takeover&amp;action=edit"> <link rel="apple-touch-icon" href="/static/apple-touch/wikipedia.png"> <link rel="icon" href="/static/favicon/wikipedia.ico"> <link rel="search" type="application/opensearchdescription+xml" href="/w/rest.php/v1/search" title="Wikipedia (en)"> <link rel="EditURI" type="application/rsd+xml" href="//en.wikipedia.org/w/api.php?action=rsd"> <link rel="canonical" href="https://en.wikipedia.org/wiki/AI_takeover"> <link rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/deed.en"> <link rel="alternate" 
type="application/atom+xml" title="Wikipedia Atom feed" href="/w/index.php?title=Special:RecentChanges&amp;feed=atom"> <link rel="dns-prefetch" href="//meta.wikimedia.org" /> <link rel="dns-prefetch" href="//login.wikimedia.org"> </head> <body class="skin--responsive skin-vector skin-vector-search-vue mediawiki ltr sitedir-ltr mw-hide-empty-elt ns-0 ns-subject mw-editable page-AI_takeover rootpage-AI_takeover skin-vector-2022 action-view"><a class="mw-jump-link" href="#bodyContent">Jump to content</a> <div class="vector-header-container"> <header class="vector-header mw-header"> <div class="vector-header-start"> <nav class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-dropdown" class="vector-dropdown vector-main-menu-dropdown vector-button-flush-left vector-button-flush-right" > <input type="checkbox" id="vector-main-menu-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-main-menu-dropdown" class="vector-dropdown-checkbox " aria-label="Main menu" > <label id="vector-main-menu-dropdown-label" for="vector-main-menu-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-menu mw-ui-icon-wikimedia-menu"></span> <span class="vector-dropdown-label-text">Main menu</span> </label> <div class="vector-dropdown-content"> <div id="vector-main-menu-unpinned-container" class="vector-unpinned-container"> <div id="vector-main-menu" class="vector-main-menu vector-pinnable-element"> <div class="vector-pinnable-header vector-main-menu-pinnable-header vector-pinnable-header-unpinned" data-feature-name="main-menu-pinned" data-pinnable-element-id="vector-main-menu" data-pinned-container-id="vector-main-menu-pinned-container" data-unpinned-container-id="vector-main-menu-unpinned-container" > <div class="vector-pinnable-header-label">Main menu</div> <button 
class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-main-menu.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-main-menu.unpin">hide</button> </div> <div id="p-navigation" class="vector-menu mw-portlet mw-portlet-navigation" > <div class="vector-menu-heading"> Navigation </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-mainpage-description" class="mw-list-item"><a href="/wiki/Main_Page" title="Visit the main page [z]" accesskey="z"><span>Main page</span></a></li><li id="n-contents" class="mw-list-item"><a href="/wiki/Wikipedia:Contents" title="Guides to browsing Wikipedia"><span>Contents</span></a></li><li id="n-currentevents" class="mw-list-item"><a href="/wiki/Portal:Current_events" title="Articles related to current events"><span>Current events</span></a></li><li id="n-randompage" class="mw-list-item"><a href="/wiki/Special:Random" title="Visit a randomly selected article [x]" accesskey="x"><span>Random article</span></a></li><li id="n-aboutsite" class="mw-list-item"><a href="/wiki/Wikipedia:About" title="Learn about Wikipedia and how it works"><span>About Wikipedia</span></a></li><li id="n-contactpage" class="mw-list-item"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us" title="How to contact Wikipedia"><span>Contact us</span></a></li> </ul> </div> </div> <div id="p-interaction" class="vector-menu mw-portlet mw-portlet-interaction" > <div class="vector-menu-heading"> Contribute </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="n-help" class="mw-list-item"><a href="/wiki/Help:Contents" title="Guidance on how to use and edit Wikipedia"><span>Help</span></a></li><li id="n-introduction" class="mw-list-item"><a href="/wiki/Help:Introduction" title="Learn how to edit Wikipedia"><span>Learn to 
edit</span></a></li><li id="n-portal" class="mw-list-item"><a href="/wiki/Wikipedia:Community_portal" title="The hub for editors"><span>Community portal</span></a></li><li id="n-recentchanges" class="mw-list-item"><a href="/wiki/Special:RecentChanges" title="A list of recent changes to Wikipedia [r]" accesskey="r"><span>Recent changes</span></a></li><li id="n-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_upload_wizard" title="Add images or other media for use on Wikipedia"><span>Upload file</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> <a href="/wiki/Main_Page" class="mw-logo"> <img class="mw-logo-icon" src="/static/images/icons/wikipedia.png" alt="" aria-hidden="true" height="50" width="50"> <span class="mw-logo-container skin-invert"> <img class="mw-logo-wordmark" alt="Wikipedia" src="/static/images/mobile/copyright/wikipedia-wordmark-en.svg" style="width: 7.5em; height: 1.125em;"> <img class="mw-logo-tagline" alt="The Free Encyclopedia" src="/static/images/mobile/copyright/wikipedia-tagline-en.svg" width="117" height="13" style="width: 7.3125em; height: 0.8125em;"> </span> </a> </div> <div class="vector-header-end"> <div id="p-search" role="search" class="vector-search-box-vue vector-search-box-collapses vector-search-box-show-thumbnail vector-search-box-auto-expand-width vector-search-box"> <a href="/wiki/Special:Search" class="cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only search-toggle" title="Search Wikipedia [f]" accesskey="f"><span class="vector-icon mw-ui-icon-search mw-ui-icon-wikimedia-search"></span> <span>Search</span> </a> <div class="vector-typeahead-search-container"> <div class="cdx-typeahead-search cdx-typeahead-search--show-thumbnail cdx-typeahead-search--auto-expand-width"> <form action="/w/index.php" id="searchform" class="cdx-search-input cdx-search-input--has-end-button"> <div id="simpleSearch" class="cdx-search-input__input-wrapper" 
data-search-loc="header-moved"> <div class="cdx-text-input cdx-text-input--has-start-icon"> <input class="cdx-text-input__input" type="search" name="search" placeholder="Search Wikipedia" aria-label="Search Wikipedia" autocapitalize="sentences" title="Search Wikipedia [f]" accesskey="f" id="searchInput" > <span class="cdx-text-input__icon cdx-text-input__start-icon"></span> </div> <input type="hidden" name="title" value="Special:Search"> </div> <button class="cdx-button cdx-search-input__end-button">Search</button> </form> </div> </div> </div> <nav class="vector-user-links vector-user-links-wide" aria-label="Personal tools"> <div class="vector-user-links-main"> <div id="p-vector-user-menu-preferences" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-userpage" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-dropdown" class="vector-dropdown " title="Change the appearance of the page&#039;s font size, width, and color" > <input type="checkbox" id="vector-appearance-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-appearance-dropdown" class="vector-dropdown-checkbox " aria-label="Appearance" > <label id="vector-appearance-dropdown-label" for="vector-appearance-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-appearance mw-ui-icon-wikimedia-appearance"></span> <span class="vector-dropdown-label-text">Appearance</span> </label> <div class="vector-dropdown-content"> <div id="vector-appearance-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> 
</nav> <div id="p-vector-user-menu-notifications" class="vector-menu mw-portlet emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> <div id="p-vector-user-menu-overflow" class="vector-menu mw-portlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="https://donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&amp;utm_medium=sidebar&amp;utm_campaign=C13_en.wikipedia.org&amp;uselang=en" class=""><span>Donate</span></a> </li> <li id="pt-createaccount-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:CreateAccount&amp;returnto=AI+takeover" title="You are encouraged to create an account and log in; however, it is not mandatory" class=""><span>Create account</span></a> </li> <li id="pt-login-2" class="user-links-collapsible-item mw-list-item user-links-collapsible-item"><a data-mw="interface" href="/w/index.php?title=Special:UserLogin&amp;returnto=AI+takeover" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. 
[o]" accesskey="o" class=""><span>Log in</span></a> </li> </ul> </div> </div> </div> <div id="vector-user-links-dropdown" class="vector-dropdown vector-user-menu vector-button-flush-right vector-user-menu-logged-out" title="Log in and more options" > <input type="checkbox" id="vector-user-links-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-user-links-dropdown" class="vector-dropdown-checkbox " aria-label="Personal tools" > <label id="vector-user-links-dropdown-label" for="vector-user-links-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-ellipsis mw-ui-icon-wikimedia-ellipsis"></span> <span class="vector-dropdown-label-text">Personal tools</span> </label> <div class="vector-dropdown-content"> <div id="p-personal" class="vector-menu mw-portlet mw-portlet-personal user-links-collapsible-item" title="User menu" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-sitesupport" class="user-links-collapsible-item mw-list-item"><a href="https://donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&amp;utm_medium=sidebar&amp;utm_campaign=C13_en.wikipedia.org&amp;uselang=en"><span>Donate</span></a></li><li id="pt-createaccount" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:CreateAccount&amp;returnto=AI+takeover" title="You are encouraged to create an account and log in; however, it is not mandatory"><span class="vector-icon mw-ui-icon-userAdd mw-ui-icon-wikimedia-userAdd"></span> <span>Create account</span></a></li><li id="pt-login" class="user-links-collapsible-item mw-list-item"><a href="/w/index.php?title=Special:UserLogin&amp;returnto=AI+takeover" title="You&#039;re encouraged to log in; however, it&#039;s not mandatory. 
[o]" accesskey="o"><span class="vector-icon mw-ui-icon-logIn mw-ui-icon-wikimedia-logIn"></span> <span>Log in</span></a></li> </ul> </div> </div> <div id="p-user-menu-anon-editor" class="vector-menu mw-portlet mw-portlet-user-menu-anon-editor" > <div class="vector-menu-heading"> Pages for logged out editors <a href="/wiki/Help:Introduction" aria-label="Learn more about editing"><span>learn more</span></a> </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="pt-anoncontribs" class="mw-list-item"><a href="/wiki/Special:MyContributions" title="A list of edits made from this IP address [y]" accesskey="y"><span>Contributions</span></a></li><li id="pt-anontalk" class="mw-list-item"><a href="/wiki/Special:MyTalk" title="Discussion about edits from this IP address [n]" accesskey="n"><span>Talk</span></a></li> </ul> </div> </div> </div> </div> </nav> </div> </header> </div> <div class="mw-page-container"> <div class="mw-page-container-inner"> <div class="vector-sitenotice-container"> <div id="siteNotice"><!-- CentralNotice --></div> </div> <div class="vector-column-start"> <div class="vector-main-menu-container"> <div id="mw-navigation"> <nav id="mw-panel" class="vector-main-menu-landmark" aria-label="Site"> <div id="vector-main-menu-pinned-container" class="vector-pinned-container"> </div> </nav> </div> </div> <div class="vector-sticky-pinned-container"> <nav id="mw-panel-toc" aria-label="Contents" data-event-name="ui.sidebar-toc" class="mw-table-of-contents-container vector-toc-landmark"> <div id="vector-toc-pinned-container" class="vector-pinned-container"> <div id="vector-toc" class="vector-toc vector-pinnable-element"> <div class="vector-pinnable-header vector-toc-pinnable-header vector-pinnable-header-pinned" data-feature-name="toc-pinned" data-pinnable-element-id="vector-toc" > <h2 class="vector-pinnable-header-label">Contents</h2> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" 
data-event-name="pinnable-header.vector-toc.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-toc.unpin">hide</button> </div> <ul class="vector-toc-contents" id="mw-panel-toc-list"> <li id="toc-mw-content-text" class="vector-toc-list-item vector-toc-level-1"> <a href="#" class="vector-toc-link"> <div class="vector-toc-text">(Top)</div> </a> </li> <li id="toc-Types" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Types"> <div class="vector-toc-text"> <span class="vector-toc-numb">1</span> <span>Types</span> </div> </a> <button aria-controls="toc-Types-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Types subsection</span> </button> <ul id="toc-Types-sublist" class="vector-toc-list"> <li id="toc-Automation_of_the_economy" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Automation_of_the_economy"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1</span> <span>Automation of the economy</span> </div> </a> <ul id="toc-Automation_of_the_economy-sublist" class="vector-toc-list"> <li id="toc-Technologies_that_may_displace_workers" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Technologies_that_may_displace_workers"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.1</span> <span>Technologies that may displace workers</span> </div> </a> <ul id="toc-Technologies_that_may_displace_workers-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Computer-integrated_manufacturing" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Computer-integrated_manufacturing"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.2</span> 
<span>Computer-integrated manufacturing</span> </div> </a> <ul id="toc-Computer-integrated_manufacturing-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-White-collar_machines" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#White-collar_machines"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.3</span> <span>White-collar machines</span> </div> </a> <ul id="toc-White-collar_machines-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Autonomous_cars" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Autonomous_cars"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.4</span> <span>Autonomous cars</span> </div> </a> <ul id="toc-Autonomous_cars-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-AI-generated_content" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#AI-generated_content"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.1.5</span> <span>AI-generated content</span> </div> </a> <ul id="toc-AI-generated_content-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Eradication" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Eradication"> <div class="vector-toc-text"> <span class="vector-toc-numb">1.2</span> <span>Eradication</span> </div> </a> <ul id="toc-Eradication-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-In_fiction" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#In_fiction"> <div class="vector-toc-text"> <span class="vector-toc-numb">2</span> <span>In fiction</span> </div> </a> <ul id="toc-In_fiction-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Contributing_factors" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Contributing_factors"> <div class="vector-toc-text"> 
<span class="vector-toc-numb">3</span> <span>Contributing factors</span> </div> </a> <button aria-controls="toc-Contributing_factors-sublist" class="cdx-button cdx-button--weight-quiet cdx-button--icon-only vector-toc-toggle"> <span class="vector-icon mw-ui-icon-wikimedia-expand"></span> <span>Toggle Contributing factors subsection</span> </button> <ul id="toc-Contributing_factors-sublist" class="vector-toc-list"> <li id="toc-Advantages_of_superhuman_intelligence_over_humans" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Advantages_of_superhuman_intelligence_over_humans"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.1</span> <span>Advantages of superhuman intelligence over humans</span> </div> </a> <ul id="toc-Advantages_of_superhuman_intelligence_over_humans-sublist" class="vector-toc-list"> <li id="toc-Sources_of_AI_advantage" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Sources_of_AI_advantage"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.1.1</span> <span>Sources of AI advantage</span> </div> </a> <ul id="toc-Sources_of_AI_advantage-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> <li id="toc-Possibility_of_unfriendly_AI_preceding_friendly_AI" class="vector-toc-list-item vector-toc-level-2"> <a class="vector-toc-link" href="#Possibility_of_unfriendly_AI_preceding_friendly_AI"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.2</span> <span>Possibility of unfriendly AI preceding friendly AI</span> </div> </a> <ul id="toc-Possibility_of_unfriendly_AI_preceding_friendly_AI-sublist" class="vector-toc-list"> <li id="toc-Is_strong_AI_inherently_dangerous?" 
class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Is_strong_AI_inherently_dangerous?"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.2.1</span> <span>Is strong AI inherently dangerous?</span> </div> </a> <ul id="toc-Is_strong_AI_inherently_dangerous?-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Odds_of_conflict" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Odds_of_conflict"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.2.2</span> <span>Odds of conflict</span> </div> </a> <ul id="toc-Odds_of_conflict-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Precautions" class="vector-toc-list-item vector-toc-level-3"> <a class="vector-toc-link" href="#Precautions"> <div class="vector-toc-text"> <span class="vector-toc-numb">3.2.3</span> <span>Precautions</span> </div> </a> <ul id="toc-Precautions-sublist" class="vector-toc-list"> </ul> </li> </ul> </li> </ul> </li> <li id="toc-Warnings" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Warnings"> <div class="vector-toc-text"> <span class="vector-toc-numb">4</span> <span>Warnings</span> </div> </a> <ul id="toc-Warnings-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-Prevention_through_AI_alignment" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Prevention_through_AI_alignment"> <div class="vector-toc-text"> <span class="vector-toc-numb">5</span> <span>Prevention through AI alignment</span> </div> </a> <ul id="toc-Prevention_through_AI_alignment-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-See_also" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#See_also"> <div class="vector-toc-text"> <span class="vector-toc-numb">6</span> <span>See also</span> </div> </a> <ul id="toc-See_also-sublist" 
class="vector-toc-list"> </ul> </li> <li id="toc-Notes" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#Notes"> <div class="vector-toc-text"> <span class="vector-toc-numb">7</span> <span>Notes</span> </div> </a> <ul id="toc-Notes-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-References" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#References"> <div class="vector-toc-text"> <span class="vector-toc-numb">8</span> <span>References</span> </div> </a> <ul id="toc-References-sublist" class="vector-toc-list"> </ul> </li> <li id="toc-External_links" class="vector-toc-list-item vector-toc-level-1 vector-toc-list-item-expanded"> <a class="vector-toc-link" href="#External_links"> <div class="vector-toc-text"> <span class="vector-toc-numb">9</span> <span>External links</span> </div> </a> <ul id="toc-External_links-sublist" class="vector-toc-list"> </ul> </li> </ul> </div> </div> </nav> </div> </div> <div class="mw-content-container"> <main id="content" class="mw-body"> <header class="mw-body-header vector-page-titlebar"> <nav aria-label="Contents" class="vector-toc-landmark"> <div id="vector-page-titlebar-toc" class="vector-dropdown vector-page-titlebar-toc vector-button-flush-left" > <input type="checkbox" id="vector-page-titlebar-toc-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-titlebar-toc" class="vector-dropdown-checkbox " aria-label="Toggle the table of contents" > <label id="vector-page-titlebar-toc-label" for="vector-page-titlebar-toc-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--icon-only " aria-hidden="true" ><span class="vector-icon mw-ui-icon-listBullet mw-ui-icon-wikimedia-listBullet"></span> <span class="vector-dropdown-label-text">Toggle the table of contents</span> </label> <div 
class="vector-dropdown-content"> <div id="vector-page-titlebar-toc-unpinned-container" class="vector-unpinned-container"> </div> </div> </div> </nav> <h1 id="firstHeading" class="firstHeading mw-first-heading"><span class="mw-page-title-main">AI takeover</span></h1> <div id="p-lang-btn" class="vector-dropdown mw-portlet mw-portlet-lang" > <input type="checkbox" id="p-lang-btn-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-p-lang-btn" class="vector-dropdown-checkbox mw-interlanguage-selector" aria-label="Go to an article in another language. Available in 20 languages" > <label id="p-lang-btn-label" for="p-lang-btn-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet cdx-button--action-progressive mw-portlet-lang-heading-20" aria-hidden="true" ><span class="vector-icon mw-ui-icon-language-progressive mw-ui-icon-wikimedia-language-progressive"></span> <span class="vector-dropdown-label-text">20 languages</span> </label> <div class="vector-dropdown-content"> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li class="interlanguage-link interwiki-af mw-list-item"><a href="https://af.wikipedia.org/wiki/KI-oorname" title="KI-oorname – Afrikaans" lang="af" hreflang="af" data-title="KI-oorname" data-language-autonym="Afrikaans" data-language-local-name="Afrikaans" class="interlanguage-link-target"><span>Afrikaans</span></a></li><li class="interlanguage-link interwiki-ar mw-list-item"><a href="https://ar.wikipedia.org/wiki/%D8%B3%D9%8A%D8%B7%D8%B1%D8%A9_%D8%A7%D9%84%D8%B0%D9%83%D8%A7%D8%A1_%D8%A7%D9%84%D8%A7%D8%B5%D8%B7%D9%86%D8%A7%D8%B9%D9%8A" title="سيطرة الذكاء الاصطناعي – Arabic" lang="ar" hreflang="ar" data-title="سيطرة الذكاء الاصطناعي" data-language-autonym="العربية" data-language-local-name="Arabic" class="interlanguage-link-target"><span>العربية</span></a></li><li class="interlanguage-link interwiki-ast mw-list-item"><a 
href="https://ast.wikipedia.org/wiki/Rebeli%C3%B3n_de_les_m%C3%A1quines" title="Rebelión de les máquines – Asturian" lang="ast" hreflang="ast" data-title="Rebelión de les máquines" data-language-autonym="Asturianu" data-language-local-name="Asturian" class="interlanguage-link-target"><span>Asturianu</span></a></li><li class="interlanguage-link interwiki-cs mw-list-item"><a href="https://cs.wikipedia.org/wiki/Vzpoura_stroj%C5%AF" title="Vzpoura strojů – Czech" lang="cs" hreflang="cs" data-title="Vzpoura strojů" data-language-autonym="Čeština" data-language-local-name="Czech" class="interlanguage-link-target"><span>Čeština</span></a></li><li class="interlanguage-link interwiki-es mw-list-item"><a href="https://es.wikipedia.org/wiki/Rebeli%C3%B3n_de_las_m%C3%A1quinas" title="Rebelión de las máquinas – Spanish" lang="es" hreflang="es" data-title="Rebelión de las máquinas" data-language-autonym="Español" data-language-local-name="Spanish" class="interlanguage-link-target"><span>Español</span></a></li><li class="interlanguage-link interwiki-fa mw-list-item"><a href="https://fa.wikipedia.org/wiki/%D8%B3%DB%8C%D8%B7%D8%B1%D9%87_%D9%87%D9%88%D8%B4_%D9%85%D8%B5%D9%86%D9%88%D8%B9%DB%8C" title="سیطره هوش مصنوعی – Persian" lang="fa" hreflang="fa" data-title="سیطره هوش مصنوعی" data-language-autonym="فارسی" data-language-local-name="Persian" class="interlanguage-link-target"><span>فارسی</span></a></li><li class="interlanguage-link interwiki-fr mw-list-item"><a href="https://fr.wikipedia.org/wiki/R%C3%A9volte_des_robots" title="Révolte des robots – French" lang="fr" hreflang="fr" data-title="Révolte des robots" data-language-autonym="Français" data-language-local-name="French" class="interlanguage-link-target"><span>Français</span></a></li><li class="interlanguage-link interwiki-ko mw-list-item"><a href="https://ko.wikipedia.org/wiki/AI%EC%97%90_%EC%9D%98%ED%95%9C_%ED%83%88%EC%B7%A8" title="AI에 의한 탈취 – Korean" lang="ko" hreflang="ko" data-title="AI에 의한 탈취" 
data-language-autonym="한국어" data-language-local-name="Korean" class="interlanguage-link-target"><span>한국어</span></a></li><li class="interlanguage-link interwiki-id mw-list-item"><a href="https://id.wikipedia.org/wiki/Pengambilalihan_kecerdasan_buatan" title="Pengambilalihan kecerdasan buatan – Indonesian" lang="id" hreflang="id" data-title="Pengambilalihan kecerdasan buatan" data-language-autonym="Bahasa Indonesia" data-language-local-name="Indonesian" class="interlanguage-link-target"><span>Bahasa Indonesia</span></a></li><li class="interlanguage-link interwiki-it mw-list-item"><a href="https://it.wikipedia.org/wiki/Ribellione_della_macchina" title="Ribellione della macchina – Italian" lang="it" hreflang="it" data-title="Ribellione della macchina" data-language-autonym="Italiano" data-language-local-name="Italian" class="interlanguage-link-target"><span>Italiano</span></a></li><li class="interlanguage-link interwiki-he mw-list-item"><a href="https://he.wikipedia.org/wiki/%D7%94%D7%A9%D7%AA%D7%9C%D7%98%D7%95%D7%AA_%D7%94%D7%91%D7%99%D7%A0%D7%94_%D7%94%D7%9E%D7%9C%D7%90%D7%9B%D7%95%D7%AA%D7%99%D7%AA" title="השתלטות הבינה המלאכותית – Hebrew" lang="he" hreflang="he" data-title="השתלטות הבינה המלאכותית" data-language-autonym="עברית" data-language-local-name="Hebrew" class="interlanguage-link-target"><span>עברית</span></a></li><li class="interlanguage-link interwiki-nl mw-list-item"><a href="https://nl.wikipedia.org/wiki/AI-overname" title="AI-overname – Dutch" lang="nl" hreflang="nl" data-title="AI-overname" data-language-autonym="Nederlands" data-language-local-name="Dutch" class="interlanguage-link-target"><span>Nederlands</span></a></li><li class="interlanguage-link interwiki-ja mw-list-item"><a href="https://ja.wikipedia.org/wiki/AI%E3%81%AB%E3%82%88%E3%82%8B%E4%B9%97%E3%81%A3%E5%8F%96%E3%82%8A" title="AIによる乗っ取り – Japanese" lang="ja" hreflang="ja" data-title="AIによる乗っ取り" data-language-autonym="日本語" data-language-local-name="Japanese" 
class="interlanguage-link-target"><span>日本語</span></a></li><li class="interlanguage-link interwiki-pt mw-list-item"><a href="https://pt.wikipedia.org/wiki/Rebeli%C3%A3o_das_m%C3%A1quinas" title="Rebelião das máquinas – Portuguese" lang="pt" hreflang="pt" data-title="Rebelião das máquinas" data-language-autonym="Português" data-language-local-name="Portuguese" class="interlanguage-link-target"><span>Português</span></a></li><li class="interlanguage-link interwiki-ro mw-list-item"><a href="https://ro.wikipedia.org/wiki/Revolt%C4%83_cibernetic%C4%83" title="Revoltă cibernetică – Romanian" lang="ro" hreflang="ro" data-title="Revoltă cibernetică" data-language-autonym="Română" data-language-local-name="Romanian" class="interlanguage-link-target"><span>Română</span></a></li><li class="interlanguage-link interwiki-ru mw-list-item"><a href="https://ru.wikipedia.org/wiki/%D0%92%D0%BE%D1%81%D1%81%D1%82%D0%B0%D0%BD%D0%B8%D0%B5_%D0%BC%D0%B0%D1%88%D0%B8%D0%BD" title="Восстание машин – Russian" lang="ru" hreflang="ru" data-title="Восстание машин" data-language-autonym="Русский" data-language-local-name="Russian" class="interlanguage-link-target"><span>Русский</span></a></li><li class="interlanguage-link interwiki-fi mw-list-item"><a href="https://fi.wikipedia.org/wiki/Koneiden_kapina" title="Koneiden kapina – Finnish" lang="fi" hreflang="fi" data-title="Koneiden kapina" data-language-autonym="Suomi" data-language-local-name="Finnish" class="interlanguage-link-target"><span>Suomi</span></a></li><li class="interlanguage-link interwiki-tr mw-list-item"><a href="https://tr.wikipedia.org/wiki/Yapay_zek%C3%A2n%C4%B1n_kontrol%C3%BC_devralmas%C4%B1" title="Yapay zekânın kontrolü devralması – Turkish" lang="tr" hreflang="tr" data-title="Yapay zekânın kontrolü devralması" data-language-autonym="Türkçe" data-language-local-name="Turkish" class="interlanguage-link-target"><span>Türkçe</span></a></li><li class="interlanguage-link interwiki-zh-yue mw-list-item"><a 
href="https://zh-yue.wikipedia.org/wiki/%E4%BA%BA%E5%B7%A5%E6%99%BA%E8%83%BD%E5%8F%9B%E8%AE%8A" title="人工智能叛變 – Cantonese" lang="yue" hreflang="yue" data-title="人工智能叛變" data-language-autonym="粵語" data-language-local-name="Cantonese" class="interlanguage-link-target"><span>粵語</span></a></li><li class="interlanguage-link interwiki-zh mw-list-item"><a href="https://zh.wikipedia.org/wiki/%E4%BA%BA%E5%B7%A5%E6%99%BA%E6%85%A7%E5%8F%9B%E8%AE%8A" title="人工智慧叛變 – Chinese" lang="zh" hreflang="zh" data-title="人工智慧叛變" data-language-autonym="中文" data-language-local-name="Chinese" class="interlanguage-link-target"><span>中文</span></a></li> </ul> <div class="after-portlet after-portlet-lang"><span class="wb-langlinks-edit wb-langlinks-link"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q2254427#sitelinks-wikipedia" title="Edit interlanguage links" class="wbc-editpage">Edit links</a></span></div> </div> </div> </div> </header> <div class="vector-page-toolbar"> <div class="vector-page-toolbar-container"> <div id="left-navigation"> <nav aria-label="Namespaces"> <div id="p-associated-pages" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-associated-pages" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-nstab-main" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/AI_takeover" title="View the content page [c]" accesskey="c"><span>Article</span></a></li><li id="ca-talk" class="vector-tab-noicon mw-list-item"><a href="/wiki/Talk:AI_takeover" rel="discussion" title="Discuss improvements to the content page [t]" accesskey="t"><span>Talk</span></a></li> </ul> </div> </div> <div id="vector-variants-dropdown" class="vector-dropdown emptyPortlet" > <input type="checkbox" id="vector-variants-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-variants-dropdown" class="vector-dropdown-checkbox " aria-label="Change language variant" > <label id="vector-variants-dropdown-label" 
for="vector-variants-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">English</span> </label> <div class="vector-dropdown-content"> <div id="p-variants" class="vector-menu mw-portlet mw-portlet-variants emptyPortlet" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> </ul> </div> </div> </div> </div> </nav> </div> <div id="right-navigation" class="vector-collapsible"> <nav aria-label="Views"> <div id="p-views" class="vector-menu vector-menu-tabs mw-portlet mw-portlet-views" > <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-view" class="selected vector-tab-noicon mw-list-item"><a href="/wiki/AI_takeover"><span>Read</span></a></li><li id="ca-edit" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-history" class="vector-tab-noicon mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;action=history" title="Past revisions of this page [h]" accesskey="h"><span>View history</span></a></li> </ul> </div> </div> </nav> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-dropdown" class="vector-dropdown vector-page-tools-dropdown" > <input type="checkbox" id="vector-page-tools-dropdown-checkbox" role="button" aria-haspopup="true" data-event-name="ui.dropdown-vector-page-tools-dropdown" class="vector-dropdown-checkbox " aria-label="Tools" > <label id="vector-page-tools-dropdown-label" for="vector-page-tools-dropdown-checkbox" class="vector-dropdown-label cdx-button cdx-button--fake-button cdx-button--fake-button--enabled cdx-button--weight-quiet" aria-hidden="true" ><span class="vector-dropdown-label-text">Tools</span> </label> <div class="vector-dropdown-content"> <div 
id="vector-page-tools-unpinned-container" class="vector-unpinned-container"> <div id="vector-page-tools" class="vector-page-tools vector-pinnable-element"> <div class="vector-pinnable-header vector-page-tools-pinnable-header vector-pinnable-header-unpinned" data-feature-name="page-tools-pinned" data-pinnable-element-id="vector-page-tools" data-pinned-container-id="vector-page-tools-pinned-container" data-unpinned-container-id="vector-page-tools-unpinned-container" > <div class="vector-pinnable-header-label">Tools</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-page-tools.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" data-event-name="pinnable-header.vector-page-tools.unpin">hide</button> </div> <div id="p-cactions" class="vector-menu mw-portlet mw-portlet-cactions emptyPortlet vector-has-collapsible-items" title="More options" > <div class="vector-menu-heading"> Actions </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="ca-more-view" class="selected vector-more-collapsible-item mw-list-item"><a href="/wiki/AI_takeover"><span>Read</span></a></li><li id="ca-more-edit" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;action=edit" title="Edit this page [e]" accesskey="e"><span>Edit</span></a></li><li id="ca-more-history" class="vector-more-collapsible-item mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;action=history"><span>View history</span></a></li> </ul> </div> </div> <div id="p-tb" class="vector-menu mw-portlet mw-portlet-tb" > <div class="vector-menu-heading"> General </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-whatlinkshere" class="mw-list-item"><a href="/wiki/Special:WhatLinksHere/AI_takeover" title="List of all English Wikipedia pages containing links to this page [j]" 
accesskey="j"><span>What links here</span></a></li><li id="t-recentchangeslinked" class="mw-list-item"><a href="/wiki/Special:RecentChangesLinked/AI_takeover" rel="nofollow" title="Recent changes in pages linked from this page [k]" accesskey="k"><span>Related changes</span></a></li><li id="t-upload" class="mw-list-item"><a href="/wiki/Wikipedia:File_Upload_Wizard" title="Upload files [u]" accesskey="u"><span>Upload file</span></a></li><li id="t-specialpages" class="mw-list-item"><a href="/wiki/Special:SpecialPages" title="A list of all special pages [q]" accesskey="q"><span>Special pages</span></a></li><li id="t-permalink" class="mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;oldid=1259605660" title="Permanent link to this revision of this page"><span>Permanent link</span></a></li><li id="t-info" class="mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;action=info" title="More information about this page"><span>Page information</span></a></li><li id="t-cite" class="mw-list-item"><a href="/w/index.php?title=Special:CiteThisPage&amp;page=AI_takeover&amp;id=1259605660&amp;wpFormIdentifier=titleform" title="Information on how to cite this page"><span>Cite this page</span></a></li><li id="t-urlshortener" class="mw-list-item"><a href="/w/index.php?title=Special:UrlShortener&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FAI_takeover"><span>Get shortened URL</span></a></li><li id="t-urlshortener-qrcode" class="mw-list-item"><a href="/w/index.php?title=Special:QrCode&amp;url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FAI_takeover"><span>Download QR code</span></a></li> </ul> </div> </div> <div id="p-coll-print_export" class="vector-menu mw-portlet mw-portlet-coll-print_export" > <div class="vector-menu-heading"> Print/export </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="coll-download-as-rl" class="mw-list-item"><a href="/w/index.php?title=Special:DownloadAsPdf&amp;page=AI_takeover&amp;action=show-download-screen" 
title="Download this page as a PDF file"><span>Download as PDF</span></a></li><li id="t-print" class="mw-list-item"><a href="/w/index.php?title=AI_takeover&amp;printable=yes" title="Printable version of this page [p]" accesskey="p"><span>Printable version</span></a></li> </ul> </div> </div> <div id="p-wikibase-otherprojects" class="vector-menu mw-portlet mw-portlet-wikibase-otherprojects" > <div class="vector-menu-heading"> In other projects </div> <div class="vector-menu-content"> <ul class="vector-menu-content-list"> <li id="t-wikibase" class="wb-otherproject-link wb-otherproject-wikibase-dataitem mw-list-item"><a href="https://www.wikidata.org/wiki/Special:EntityPage/Q2254427" title="Structured data on this page hosted by Wikidata [g]" accesskey="g"><span>Wikidata item</span></a></li> </ul> </div> </div> </div> </div> </div> </div> </nav> </div> </div> </div> <div class="vector-column-end"> <div class="vector-sticky-pinned-container"> <nav class="vector-page-tools-landmark" aria-label="Page tools"> <div id="vector-page-tools-pinned-container" class="vector-pinned-container"> </div> </nav> <nav class="vector-appearance-landmark" aria-label="Appearance"> <div id="vector-appearance-pinned-container" class="vector-pinned-container"> <div id="vector-appearance" class="vector-appearance vector-pinnable-element"> <div class="vector-pinnable-header vector-appearance-pinnable-header vector-pinnable-header-pinned" data-feature-name="appearance-pinned" data-pinnable-element-id="vector-appearance" data-pinned-container-id="vector-appearance-pinned-container" data-unpinned-container-id="vector-appearance-unpinned-container" > <div class="vector-pinnable-header-label">Appearance</div> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-pin-button" data-event-name="pinnable-header.vector-appearance.pin">move to sidebar</button> <button class="vector-pinnable-header-toggle-button vector-pinnable-header-unpin-button" 
data-event-name="pinnable-header.vector-appearance.unpin">hide</button> </div> </div> </div> </nav> </div> </div> <div id="bodyContent" class="vector-body" aria-labelledby="firstHeading" data-mw-ve-target-container> <div class="vector-body-before-content"> <div class="mw-indicators"> </div> <div id="siteSub" class="noprint">From Wikipedia, the free encyclopedia</div> </div> <div id="contentSub"><div id="mw-content-subtitle"></div></div> <div id="mw-content-text" class="mw-body-content"><div class="mw-content-ltr mw-parser-output" lang="en" dir="ltr"><div class="shortdescription nomobile noexcerpt noprint searchaux" style="display:none">Hypothetical outcome of artificial intelligence</div> <figure class="mw-default-size" typeof="mw:File/Thumb"><a href="/wiki/File:Capek_RUR.jpg" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/d/d9/Capek_RUR.jpg/260px-Capek_RUR.jpg" decoding="async" width="260" height="203" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/d/d9/Capek_RUR.jpg/390px-Capek_RUR.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/d/d9/Capek_RUR.jpg/520px-Capek_RUR.jpg 2x" data-file-width="566" data-file-height="442" /></a><figcaption>Robots revolt in <i><a href="/wiki/R.U.R." 
title="R.U.R.">R.U.R.</a></i>, a 1920 Czech play translated as "Rossum's Universal Robots"</figcaption></figure> <style data-mw-deduplicate="TemplateStyles:r1129693374">.mw-parser-output .hlist dl,.mw-parser-output .hlist ol,.mw-parser-output .hlist ul{margin:0;padding:0}.mw-parser-output .hlist dd,.mw-parser-output .hlist dt,.mw-parser-output .hlist li{margin:0;display:inline}.mw-parser-output .hlist.inline,.mw-parser-output .hlist.inline dl,.mw-parser-output .hlist.inline ol,.mw-parser-output .hlist.inline ul,.mw-parser-output .hlist dl dl,.mw-parser-output .hlist dl ol,.mw-parser-output .hlist dl ul,.mw-parser-output .hlist ol dl,.mw-parser-output .hlist ol ol,.mw-parser-output .hlist ol ul,.mw-parser-output .hlist ul dl,.mw-parser-output .hlist ul ol,.mw-parser-output .hlist ul ul{display:inline}.mw-parser-output .hlist .mw-empty-li{display:none}.mw-parser-output .hlist dt::after{content:": "}.mw-parser-output .hlist dd::after,.mw-parser-output .hlist li::after{content:" · ";font-weight:bold}.mw-parser-output .hlist dd:last-child::after,.mw-parser-output .hlist dt:last-child::after,.mw-parser-output .hlist li:last-child::after{content:none}.mw-parser-output .hlist dd dd:first-child::before,.mw-parser-output .hlist dd dt:first-child::before,.mw-parser-output .hlist dd li:first-child::before,.mw-parser-output .hlist dt dd:first-child::before,.mw-parser-output .hlist dt dt:first-child::before,.mw-parser-output .hlist dt li:first-child::before,.mw-parser-output .hlist li dd:first-child::before,.mw-parser-output .hlist li dt:first-child::before,.mw-parser-output .hlist li li:first-child::before{content:" (";font-weight:normal}.mw-parser-output .hlist dd dd:last-child::after,.mw-parser-output .hlist dd dt:last-child::after,.mw-parser-output .hlist dd li:last-child::after,.mw-parser-output .hlist dt dd:last-child::after,.mw-parser-output .hlist dt dt:last-child::after,.mw-parser-output .hlist dt li:last-child::after,.mw-parser-output .hlist li 
dd:last-child::after,.mw-parser-output .hlist li dt:last-child::after,.mw-parser-output .hlist li li:last-child::after{content:")";font-weight:normal}.mw-parser-output .hlist ol{counter-reset:listitem}.mw-parser-output .hlist ol>li{counter-increment:listitem}.mw-parser-output .hlist ol>li::before{content:" "counter(listitem)"\a0 "}.mw-parser-output .hlist dd ol>li:first-child::before,.mw-parser-output .hlist dt ol>li:first-child::before,.mw-parser-output .hlist li ol>li:first-child::before{content:" ("counter(listitem)"\a0 "}</style><style data-mw-deduplicate="TemplateStyles:r1246091330">.mw-parser-output .sidebar{width:22em;float:right;clear:right;margin:0.5em 0 1em 1em;background:var(--background-color-neutral-subtle,#f8f9fa);border:1px solid var(--border-color-base,#a2a9b1);padding:0.2em;text-align:center;line-height:1.4em;font-size:88%;border-collapse:collapse;display:table}body.skin-minerva .mw-parser-output .sidebar{display:table!important;float:right!important;margin:0.5em 0 1em 1em!important}.mw-parser-output .sidebar-subgroup{width:100%;margin:0;border-spacing:0}.mw-parser-output .sidebar-left{float:left;clear:left;margin:0.5em 1em 1em 0}.mw-parser-output .sidebar-none{float:none;clear:both;margin:0.5em 1em 1em 0}.mw-parser-output .sidebar-outer-title{padding:0 0.4em 0.2em;font-size:125%;line-height:1.2em;font-weight:bold}.mw-parser-output .sidebar-top-image{padding:0.4em}.mw-parser-output .sidebar-top-caption,.mw-parser-output .sidebar-pretitle-with-top-image,.mw-parser-output .sidebar-caption{padding:0.2em 0.4em 0;line-height:1.2em}.mw-parser-output .sidebar-pretitle{padding:0.4em 0.4em 0;line-height:1.2em}.mw-parser-output .sidebar-title,.mw-parser-output .sidebar-title-with-pretitle{padding:0.2em 0.8em;font-size:145%;line-height:1.2em}.mw-parser-output .sidebar-title-with-pretitle{padding:0.1em 0.4em}.mw-parser-output .sidebar-image{padding:0.2em 0.4em 0.4em}.mw-parser-output .sidebar-heading{padding:0.1em 0.4em}.mw-parser-output 
.sidebar-content{padding:0 0.5em 0.4em}.mw-parser-output .sidebar-content-with-subgroup{padding:0.1em 0.4em 0.2em}.mw-parser-output .sidebar-above,.mw-parser-output .sidebar-below{padding:0.3em 0.8em;font-weight:bold}.mw-parser-output .sidebar-collapse .sidebar-above,.mw-parser-output .sidebar-collapse .sidebar-below{border-top:1px solid #aaa;border-bottom:1px solid #aaa}.mw-parser-output .sidebar-navbar{text-align:right;font-size:115%;padding:0 0.4em 0.4em}.mw-parser-output .sidebar-list-title{padding:0 0.4em;text-align:left;font-weight:bold;line-height:1.6em;font-size:105%}.mw-parser-output .sidebar-list-title-c{padding:0 0.4em;text-align:center;margin:0 3.3em}@media(max-width:640px){body.mediawiki .mw-parser-output .sidebar{width:100%!important;clear:both;float:none!important;margin-left:0!important;margin-right:0!important}}body.skin--responsive .mw-parser-output .sidebar a>img{max-width:none!important}@media screen{html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-list-title,html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle{background:transparent!important}html.skin-theme-clientpref-night .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle a{color:var(--color-progressive)!important}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-list-title,html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle{background:transparent!important}html.skin-theme-clientpref-os .mw-parser-output .sidebar:not(.notheme) .sidebar-title-with-pretitle a{color:var(--color-progressive)!important}}@media print{body.ns-0 .mw-parser-output .sidebar{display:none!important}}</style><table class="sidebar sidebar-collapse nomobile nowraplinks hlist"><tbody><tr><td class="sidebar-pretitle">Part of a series on</td></tr><tr><th class="sidebar-title-with-pretitle"><a 
href="/wiki/Artificial_intelligence" title="Artificial intelligence">Artificial intelligence</a></th></tr><tr><td class="sidebar-image"><figure class="mw-halign-center" typeof="mw:File"><a href="/wiki/File:Dall-e_3_(jan_%2724)_artificial_intelligence_icon.png" class="mw-file-description"><img src="//upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/100px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png" decoding="async" width="100" height="100" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/150px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/6/64/Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png/200px-Dall-e_3_%28jan_%2724%29_artificial_intelligence_icon.png 2x" data-file-width="820" data-file-height="820" /></a><figcaption></figcaption></figure></td></tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Artificial_intelligence#Goals" title="Artificial intelligence">Major goals</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Artificial_general_intelligence" title="Artificial general intelligence">Artificial general intelligence</a></li> <li><a href="/wiki/Intelligent_agent" title="Intelligent agent">Intelligent agent</a></li> <li><a href="/wiki/Recursive_self-improvement" title="Recursive self-improvement">Recursive self-improvement</a></li> <li><a href="/wiki/Automated_planning_and_scheduling" title="Automated planning and scheduling">Planning</a></li> <li><a href="/wiki/Computer_vision" title="Computer vision">Computer vision</a></li> <li><a href="/wiki/General_game_playing" title="General game playing">General game playing</a></li> <li><a 
href="/wiki/Knowledge_representation_and_reasoning" title="Knowledge representation and reasoning">Knowledge reasoning</a></li> <li><a href="/wiki/Natural_language_processing" title="Natural language processing">Natural language processing</a></li> <li><a href="/wiki/Robotics" title="Robotics">Robotics</a></li> <li><a href="/wiki/AI_safety" title="AI safety">AI safety</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)">Approaches</div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Machine_learning" title="Machine learning">Machine learning</a></li> <li><a href="/wiki/Symbolic_artificial_intelligence" title="Symbolic artificial intelligence">Symbolic</a></li> <li><a href="/wiki/Deep_learning" title="Deep learning">Deep learning</a></li> <li><a href="/wiki/Bayesian_network" title="Bayesian network">Bayesian networks</a></li> <li><a href="/wiki/Evolutionary_algorithm" title="Evolutionary algorithm">Evolutionary algorithms</a></li> <li><a href="/wiki/Hybrid_intelligent_system" title="Hybrid intelligent system">Hybrid intelligent systems</a></li> <li><a href="/wiki/Artificial_intelligence_systems_integration" title="Artificial intelligence systems integration">Systems integration</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Applications_of_artificial_intelligence" title="Applications of artificial intelligence">Applications</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Machine_learning_in_bioinformatics" title="Machine learning in bioinformatics">Bioinformatics</a></li> <li><a href="/wiki/Deepfake" title="Deepfake">Deepfake</a></li> <li><a 
href="/wiki/Machine_learning_in_earth_sciences" title="Machine learning in earth sciences">Earth sciences</a></li> <li><a href="/wiki/Applications_of_artificial_intelligence#Finance" title="Applications of artificial intelligence"> Finance </a></li> <li><a href="/wiki/Generative_artificial_intelligence" title="Generative artificial intelligence">Generative AI</a> <ul><li><a href="/wiki/Artificial_intelligence_art" title="Artificial intelligence art">Art</a></li> <li><a href="/wiki/Generative_audio" title="Generative audio">Audio</a></li> <li><a href="/wiki/Music_and_artificial_intelligence" title="Music and artificial intelligence">Music</a></li></ul></li> <li><a href="/wiki/Artificial_intelligence_in_government" title="Artificial intelligence in government">Government</a></li> <li><a href="/wiki/Artificial_intelligence_in_healthcare" title="Artificial intelligence in healthcare">Healthcare</a> <ul><li><a href="/wiki/Artificial_intelligence_in_mental_health" title="Artificial intelligence in mental health">Mental health</a></li></ul></li> <li><a href="/wiki/Artificial_intelligence_in_industry" title="Artificial intelligence in industry">Industry</a></li> <li><a href="/wiki/Machine_translation" title="Machine translation">Translation</a></li> <li><a href="/wiki/Artificial_intelligence_arms_race" title="Artificial intelligence arms race"> Military </a></li> <li><a href="/wiki/Machine_learning_in_physics" title="Machine learning in physics">Physics</a></li> <li><a href="/wiki/List_of_artificial_intelligence_projects" title="List of artificial intelligence projects">Projects</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/Philosophy_of_artificial_intelligence" title="Philosophy of artificial intelligence">Philosophy</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a 
href="/wiki/Artificial_consciousness" title="Artificial consciousness">Artificial consciousness</a></li> <li><a href="/wiki/Chinese_room" title="Chinese room">Chinese room</a></li> <li><a href="/wiki/Friendly_artificial_intelligence" title="Friendly artificial intelligence">Friendly AI</a></li> <li><a href="/wiki/AI_control_problem" class="mw-redirect" title="AI control problem">Control problem</a>/<a class="mw-selflink selflink">Takeover</a></li> <li><a href="/wiki/Ethics_of_artificial_intelligence" title="Ethics of artificial intelligence">Ethics</a></li> <li><a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk</a></li> <li><a href="/wiki/Regulation_of_artificial_intelligence" title="Regulation of artificial intelligence">Regulation</a></li> <li><a href="/wiki/Turing_test" title="Turing test">Turing test</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)"><a href="/wiki/History_of_artificial_intelligence" title="History of artificial intelligence">History</a></div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a href="/wiki/Timeline_of_artificial_intelligence" title="Timeline of artificial intelligence">Timeline</a></li> <li><a href="/wiki/Progress_in_artificial_intelligence" title="Progress in artificial intelligence">Progress</a></li> <li><a href="/wiki/AI_winter" title="AI winter">AI winter</a></li> <li><a href="/wiki/AI_boom" title="AI boom">AI boom</a></li></ul></div></div></td> </tr><tr><td class="sidebar-content"> <div class="sidebar-list mw-collapsible mw-collapsed"><div class="sidebar-list-title" style="text-align:center;color: var(--color-base)">Glossary</div><div class="sidebar-list-content mw-collapsible-content"> <ul><li><a 
href="/wiki/Glossary_of_artificial_intelligence" title="Glossary of artificial intelligence">Glossary</a></li></ul></div></div></td> </tr><tr><td class="sidebar-navbar"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><style data-mw-deduplicate="TemplateStyles:r1239400231">.mw-parser-output .navbar{display:inline;font-size:88%;font-weight:normal}.mw-parser-output .navbar-collapse{float:left;text-align:left}.mw-parser-output .navbar-boxtext{word-spacing:0}.mw-parser-output .navbar ul{display:inline-block;white-space:nowrap;line-height:inherit}.mw-parser-output .navbar-brackets::before{margin-right:-0.125em;content:"[ "}.mw-parser-output .navbar-brackets::after{margin-left:-0.125em;content:" ]"}.mw-parser-output .navbar li{word-spacing:-0.125em}.mw-parser-output .navbar a>span,.mw-parser-output .navbar a>abbr{text-decoration:inherit}.mw-parser-output .navbar-mini abbr{font-variant:small-caps;border-bottom:none;text-decoration:none;cursor:inherit}.mw-parser-output .navbar-ct-full{font-size:114%;margin:0 7em}.mw-parser-output .navbar-ct-mini{font-size:114%;margin:0 4em}html.skin-theme-clientpref-night .mw-parser-output .navbar li a abbr{color:var(--color-base)!important}@media(prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .navbar li a abbr{color:var(--color-base)!important}}@media print{.mw-parser-output .navbar{display:none!important}}</style><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Artificial_intelligence" title="Template:Artificial intelligence"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Artificial_intelligence" title="Template talk:Artificial intelligence"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Artificial_intelligence" title="Special:EditPage/Template:Artificial intelligence"><abbr title="Edit this 
template">e</abbr></a></li></ul></div></td></tr></tbody></table> <p>An <b>AI takeover</b> is an imagined scenario in which <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> (AI) emerges as the dominant form of <a href="/wiki/Intelligence" title="Intelligence">intelligence</a> on Earth and <a href="/wiki/Computer_program" title="Computer program">computer programs</a> or <a href="/wiki/Robot" title="Robot">robots</a> effectively take control of the planet away from the <a href="/wiki/Human_species" class="mw-redirect" title="Human species">human species</a>, which relies on <a href="/wiki/Human_intelligence" title="Human intelligence">human intelligence</a>. Possible scenarios include <a href="/wiki/Technological_unemployment" title="Technological unemployment">replacement of the entire human workforce</a> due to <a href="/wiki/Automation" title="Automation">automation</a>, takeover by a <a href="/wiki/Superintelligent_AI" class="mw-redirect" title="Superintelligent AI">superintelligent AI</a> (ASI), and the notion of a <b>robot uprising</b>. Stories of AI takeovers <a href="/wiki/AI_takeover_in_popular_culture" title="AI takeover in popular culture">have been popular</a> throughout <a href="/wiki/Science_fiction" title="Science fiction">science fiction</a>, but recent advancements have made the threat more real. 
Some public figures, such as <a href="/wiki/Stephen_Hawking" title="Stephen Hawking">Stephen Hawking</a> and <a href="/wiki/Elon_Musk" title="Elon Musk">Elon Musk</a>, have advocated research into <a href="/wiki/AI_control_problem" class="mw-redirect" title="AI control problem">precautionary measures</a> to ensure future superintelligent machines remain under human control.<sup id="cite_ref-1" class="reference"><a href="#cite_note-1"><span class="cite-bracket">&#91;</span>1<span class="cite-bracket">&#93;</span></a></sup> </p> <meta property="mw:PageProp/toc" /> <div class="mw-heading mw-heading2"><h2 id="Types">Types</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=1" title="Edit section: Types"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="mw-heading mw-heading3"><h3 id="Automation_of_the_economy">Automation of the economy</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=2" title="Edit section: Automation of the economy"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1236090951">.mw-parser-output .hatnote{font-style:italic}.mw-parser-output div.hatnote{padding-left:1.6em;margin-bottom:0.5em}.mw-parser-output .hatnote i{font-style:normal}.mw-parser-output .hatnote+link+.hatnote{margin-top:-0.5em}@media print{body.ns-0 .mw-parser-output .hatnote{display:none!important}}</style><div role="note" class="hatnote navigation-not-searchable">Main article: <a href="/wiki/Technological_unemployment" title="Technological unemployment">Technological unemployment</a></div> <p>The traditional consensus among economists has been that technological progress does not cause long-term unemployment. 
However, recent innovation in the fields of <a href="/wiki/Robotics" title="Robotics">robotics</a> and artificial intelligence has raised worries that human labor will become obsolete, leaving people in various sectors without jobs to earn a living, leading to an economic crisis.<sup id="cite_ref-2" class="reference"><a href="#cite_note-2"><span class="cite-bracket">&#91;</span>2<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-3" class="reference"><a href="#cite_note-3"><span class="cite-bracket">&#91;</span>3<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-4" class="reference"><a href="#cite_note-4"><span class="cite-bracket">&#91;</span>4<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-5" class="reference"><a href="#cite_note-5"><span class="cite-bracket">&#91;</span>5<span class="cite-bracket">&#93;</span></a></sup> Many small and medium size businesses may also be driven out of business if they cannot afford or licence the latest robotic and AI technology, and may need to focus on areas or services that cannot easily be replaced for continued viability in the face of such technology.<sup id="cite_ref-6" class="reference"><a href="#cite_note-6"><span class="cite-bracket">&#91;</span>6<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Technologies_that_may_displace_workers">Technologies that may displace workers</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=3" title="Edit section: Technologies that may displace workers"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>AI technologies have been widely adopted in recent years. While these technologies have replaced some traditional workers, they also create new opportunities. Industries that are most susceptible to AI takeover include transportation, retail, and military. 
AI military technologies, for example, allow soldiers to work remotely without risk of injury. A study in 2024 highlights that AI's ability to perform routine and repetitive tasks poses significant risks of job displacement, especially in sectors like manufacturing and administrative support.<sup id="cite_ref-7" class="reference"><a href="#cite_note-7"><span class="cite-bracket">&#91;</span>7<span class="cite-bracket">&#93;</span></a></sup> Author Dave Bond argues that as AI technologies continue to develop and expand, the relationship between humans and robots will change; they will become closely integrated in several aspects of life. AI will likely displace some workers while creating opportunities for new jobs in other sectors, especially in fields where tasks are repeatable.<sup id="cite_ref-8" class="reference"><a href="#cite_note-8"><span class="cite-bracket">&#91;</span>8<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-9" class="reference"><a href="#cite_note-9"><span class="cite-bracket">&#91;</span>9<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Computer-integrated_manufacturing">Computer-integrated manufacturing</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=4" title="Edit section: Computer-integrated manufacturing"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">See also: <a href="/wiki/Artificial_intelligence_in_industry" title="Artificial intelligence in industry">Artificial intelligence in industry</a></div> <p><a href="/wiki/Computer-integrated_manufacturing" title="Computer-integrated manufacturing">Computer-integrated manufacturing</a> uses computers to control the production process. 
This allows individual processes to exchange information with each other and initiate actions. Although manufacturing can be faster and less error-prone by the integration of computers, the main advantage is the ability to create automated manufacturing processes. Computer-integrated manufacturing is used in automotive, aviation, space, and ship building industries. </p> <div class="mw-heading mw-heading4"><h4 id="White-collar_machines">White-collar machines</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=5" title="Edit section: White-collar machines"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">See also: <a href="/wiki/White-collar_worker" title="White-collar worker">White-collar worker</a></div> <p>The 21st century has seen a variety of skilled tasks partially taken over by machines, including translation, legal research, and journalism. 
Care work, entertainment, and other tasks requiring empathy, previously thought safe from automation, have also begun to be performed by robots.<sup id="cite_ref-10" class="reference"><a href="#cite_note-10"><span class="cite-bracket">&#91;</span>10<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-11" class="reference"><a href="#cite_note-11"><span class="cite-bracket">&#91;</span>11<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-12" class="reference"><a href="#cite_note-12"><span class="cite-bracket">&#91;</span>12<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-13" class="reference"><a href="#cite_note-13"><span class="cite-bracket">&#91;</span>13<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Autonomous_cars">Autonomous cars</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=6" title="Edit section: Autonomous cars"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>An <a href="/wiki/Self-driving_car" title="Self-driving car">autonomous car</a> is a vehicle that is capable of sensing its environment and navigating without human input. Many such vehicles are being developed, but as of May 2017, automated cars permitted on public roads are not yet fully autonomous. They all require a human driver at the wheel who at a moment's notice can take control of the vehicle. Among the obstacles to widespread adoption of autonomous vehicles are concerns about the resulting loss of driving-related jobs in the road transport industry. 
On March 18, 2018, <a href="/wiki/Death_of_Elaine_Herzberg" title="Death of Elaine Herzberg">the first human was killed</a> by an autonomous vehicle in <a href="/wiki/Tempe,_Arizona" title="Tempe, Arizona">Tempe, Arizona</a> by an <a href="/wiki/Uber" title="Uber">Uber</a> self-driving car.<sup id="cite_ref-14" class="reference"><a href="#cite_note-14"><span class="cite-bracket">&#91;</span>14<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="AI-generated_content">AI-generated content</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=7" title="Edit section: AI-generated content"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">See also: <a href="/wiki/Artificial_intelligence_art" title="Artificial intelligence art">Artificial intelligence art</a></div> <p>The use of automated content has become relevant since the technological advancements in artificial intelligence models such as <a href="/wiki/ChatGPT" title="ChatGPT">ChatGPT</a>, <a href="/wiki/DALL-E" title="DALL-E">DALL-E</a>, and <a href="/wiki/Stable_Diffusion" title="Stable Diffusion">Stable Diffusion</a>. In most cases, AI-generated content such as imagery, literature, and music is produced through text prompts and these AI models have been integrated into other creative programs. Artists are threatened by displacement from AI-generated content due to these models sampling from other creative works, producing results sometimes indiscernible from man-made content. This complication has become widespread enough that other artists and programmers are creating software and utility programs to retaliate against these text-to-image models by preventing them from giving accurate outputs. 
While some industries in the economy benefit from artificial intelligence through new jobs, this issue does not create new jobs and threatens replacement entirely. It has made public headlines in the media recently: In February 2024, <a href="/wiki/Willy%27s_Chocolate_Experience" title="Willy&#39;s Chocolate Experience">Willy's Chocolate Experience</a> in <a href="/wiki/Glasgow,_Scotland" class="mw-redirect" title="Glasgow, Scotland">Glasgow, Scotland</a> was an infamous children's event in which the imagery and scripts were created using artificial intelligence models to the dismay of children, parents, and actors involved. There is an ongoing lawsuit placed against <a href="/wiki/OpenAI" title="OpenAI">OpenAI</a> from <a href="/wiki/The_New_York_Times" title="The New York Times">The New York Times</a> where it is claimed that there is copyright infringement due to the sampling methods their artificial intelligence models use for their outputs.<sup id="cite_ref-15" class="reference"><a href="#cite_note-15"><span class="cite-bracket">&#91;</span>15<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-16" class="reference"><a href="#cite_note-16"><span class="cite-bracket">&#91;</span>16<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-17" class="reference"><a href="#cite_note-17"><span class="cite-bracket">&#91;</span>17<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-18" class="reference"><a href="#cite_note-18"><span class="cite-bracket">&#91;</span>18<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-19" class="reference"><a href="#cite_note-19"><span class="cite-bracket">&#91;</span>19<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Eradication">Eradication</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=8" title="Edit section: 
Eradication"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">Main article: <a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk from artificial general intelligence</a></div> <p>Scientists such as <a href="/wiki/Stephen_Hawking" title="Stephen Hawking">Stephen Hawking</a> are confident that superhuman artificial intelligence is physically possible, stating "there is no physical law precluding particles from being organised in ways that perform even more advanced computations than the arrangements of particles in human brains".<sup id="cite_ref-20" class="reference"><a href="#cite_note-20"><span class="cite-bracket">&#91;</span>20<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-21" class="reference"><a href="#cite_note-21"><span class="cite-bracket">&#91;</span>21<span class="cite-bracket">&#93;</span></a></sup> Scholars like <a href="/wiki/Nick_Bostrom" title="Nick Bostrom">Nick Bostrom</a> debate how far off superhuman intelligence is, and whether it poses a risk to mankind. According to Bostrom, a superintelligent machine would not necessarily be motivated by the same <i>emotional</i> desire to collect power that often drives human beings but might rather treat power as a means toward attaining its ultimate goals; taking over the world would both increase its access to resources and help to prevent other agents from stopping the machine's plans. 
As an oversimplified example, a <a href="/wiki/Instrumental_convergence#Paperclip_maximizer" title="Instrumental convergence">paperclip maximizer</a> designed solely to create as many paperclips as possible would want to take over the world so that it can use all of the world's resources to create as many paperclips as possible, and, additionally, prevent humans from shutting it down or using those resources on things other than paperclips.<sup id="cite_ref-22" class="reference"><a href="#cite_note-22"><span class="cite-bracket">&#91;</span>22<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="In_fiction">In fiction</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=9" title="Edit section: In fiction"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">Main article: <a href="/wiki/AI_takeovers_in_popular_culture" class="mw-redirect" title="AI takeovers in popular culture">AI takeovers in popular culture</a></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">See also: <a href="/wiki/Artificial_intelligence_in_fiction" title="Artificial intelligence in fiction">Artificial intelligence in fiction</a> and <a href="/wiki/Self-replicating_machines_in_fiction" class="mw-redirect" title="Self-replicating machines in fiction">Self-replicating machines in fiction</a></div> <p>AI takeover is a common theme in <a href="/wiki/Science_fiction" title="Science fiction">science fiction</a>. 
Fictional scenarios typically differ vastly from those hypothesized by researchers in that they involve an active conflict between humans and an AI or robots with anthropomorphic motives who see them as a threat or otherwise have active desire to fight humans, as opposed to the researchers' concern of an AI that rapidly exterminates humans as a byproduct of pursuing its goals.<sup id="cite_ref-bostrom-superintelligence_23-0" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> The idea is seen in <a href="/wiki/Karel_%C4%8Capek" title="Karel Čapek">Karel Čapek</a>'s <i><a href="/wiki/R.U.R." title="R.U.R.">R.U.R.</a></i>, which introduced the word <i>robot</i> in 1921,<sup id="cite_ref-24" class="reference"><a href="#cite_note-24"><span class="cite-bracket">&#91;</span>24<span class="cite-bracket">&#93;</span></a></sup> and can be glimpsed in <a href="/wiki/Mary_Shelley" title="Mary Shelley">Mary Shelley</a>'s <i><a href="/wiki/Frankenstein" title="Frankenstein">Frankenstein</a></i> (published in 1818), as Victor ponders whether, if he grants <a href="/wiki/Frankenstein%27s_monster" title="Frankenstein&#39;s monster">his monster's</a> request and makes him a wife, they would reproduce and their kind would destroy humanity.<sup id="cite_ref-25" class="reference"><a href="#cite_note-25"><span class="cite-bracket">&#91;</span>25<span class="cite-bracket">&#93;</span></a></sup> </p><p>According to <a href="/wiki/Toby_Ord" title="Toby Ord">Toby Ord</a>, the idea that an AI takeover requires robots is a misconception driven by the media and Hollywood. He argues that the most damaging humans in history were not physically the strongest, but that they used words instead to convince people and gain control of large parts of the world. 
He writes that a <i>sufficiently</i> intelligent AI with access to the internet could scatter backup copies of itself, gather financial and human resources (via cyberattacks or blackmail), persuade people on a large scale, and exploit societal vulnerabilities that are too subtle for humans to anticipate.<sup id="cite_ref-26" class="reference"><a href="#cite_note-26"><span class="cite-bracket">&#91;</span>26<span class="cite-bracket">&#93;</span></a></sup> </p><p>The word "robot" from <i>R.U.R.</i> comes from the Czech word, <i>robota</i>, meaning laborer or <a href="/wiki/Serf" class="mw-redirect" title="Serf">serf</a>. The 1920 play was a protest against the rapid growth of technology, featuring manufactured "robots" with increasing capabilities who eventually revolt.<sup id="cite_ref-surgery_27-0" class="reference"><a href="#cite_note-surgery-27"><span class="cite-bracket">&#91;</span>27<span class="cite-bracket">&#93;</span></a></sup> <a href="/wiki/HAL_9000" title="HAL 9000">HAL 9000</a> (1968) and the original <a href="/wiki/Terminator_(character)" title="Terminator (character)">Terminator</a> (1984) are two iconic examples of hostile AI in pop culture.<sup id="cite_ref-28" class="reference"><a href="#cite_note-28"><span class="cite-bracket">&#91;</span>28<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Contributing_factors">Contributing factors</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=10" title="Edit section: Contributing factors"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="mw-heading mw-heading3"><h3 id="Advantages_of_superhuman_intelligence_over_humans">Advantages of superhuman intelligence over humans</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=11" 
title="Edit section: Advantages of superhuman intelligence over humans"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p><a href="/wiki/Nick_Bostrom" title="Nick Bostrom">Nick Bostrom</a> and others have expressed concern that an AI with the abilities of a competent artificial intelligence researcher would be able to modify its own source code and increase its own intelligence. If its self-reprogramming leads to getting even better at being able to reprogram itself, the result could be a recursive <a href="/wiki/Intelligence_explosion" class="mw-redirect" title="Intelligence explosion">intelligence explosion</a> in which it would rapidly leave human intelligence far behind. Bostrom defines a superintelligence as "any intellect that greatly exceeds the cognitive performance of humans in virtually all domains of interest", and enumerates some advantages a superintelligence would have if it chose to compete against humans:<sup id="cite_ref-bostrom-superintelligence_23-1" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-BabcockKrámar2019_29-0" class="reference"><a href="#cite_note-BabcockKrámar2019-29"><span class="cite-bracket">&#91;</span>29<span class="cite-bracket">&#93;</span></a></sup> </p> <ul><li>Technology research: A machine with superhuman scientific research abilities would be able to beat the human research community to milestones such as nanotechnology or advanced biotechnology</li> <li><a href="/wiki/Strategy" title="Strategy">Strategizing</a>: A superintelligence might be able to simply outwit human opposition</li> <li>Social manipulation: A superintelligence might be able to recruit human support,<sup id="cite_ref-bostrom-superintelligence_23-2" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span 
class="cite-bracket">&#93;</span></a></sup> or covertly incite a war between humans<sup id="cite_ref-30" class="reference"><a href="#cite_note-30"><span class="cite-bracket">&#91;</span>30<span class="cite-bracket">&#93;</span></a></sup></li> <li>Economic productivity: As long as a copy of the AI could produce more economic wealth than the cost of its hardware, individual humans would have an incentive to voluntarily allow the <a href="/wiki/Artificial_General_Intelligence" class="mw-redirect" title="Artificial General Intelligence">Artificial General Intelligence</a> (AGI) to run a copy of itself on their systems</li> <li>Hacking: A superintelligence could find new exploits in computers connected to the Internet, and spread copies of itself onto those systems, or might steal money to finance its plans</li></ul> <div class="mw-heading mw-heading4"><h4 id="Sources_of_AI_advantage">Sources of AI advantage</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=12" title="Edit section: Sources of AI advantage"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>According to Bostrom, a computer program that faithfully emulates a human brain, or that runs algorithms that are as powerful as the human brain's algorithms, could still become a "speed superintelligence" if it can think orders of magnitude faster than a human, due to being made of silicon rather than flesh, or due to optimization increasing the speed of the AGI. Biological neurons operate at about 200&#160;Hz, whereas a modern microprocessor operates at a speed of about 2,000,000,000&#160;Hz. 
Human axons carry action potentials at around 120&#160;m/s, whereas computer signals travel near the speed of light.<sup id="cite_ref-bostrom-superintelligence_23-3" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> </p><p>A network of human-level intelligences designed to network together and share complex thoughts and memories seamlessly, able to collectively work as a giant unified team without friction, or consisting of trillions of human-level intelligences, would become a "collective superintelligence".<sup id="cite_ref-bostrom-superintelligence_23-4" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> </p><p>More broadly, any number of qualitative improvements to a human-level AGI could result in a "quality superintelligence", perhaps resulting in an AGI as far above us in intelligence as humans are above apes. The number of neurons in a human brain is limited by cranial volume and metabolic constraints, while the number of processors in a supercomputer can be indefinitely expanded. An AGI need not be limited by human constraints on <a href="/wiki/Working_memory" title="Working memory">working memory</a>, and might therefore be able to intuitively grasp more complex relationships than humans can. An AGI with specialized cognitive support for engineering or computer programming would have an advantage in these fields, compared with humans who evolved no specialized mental modules to specifically deal with those domains. 
Unlike humans, an AGI can spawn copies of itself and tinker with its copies' source code to attempt to further improve its algorithms.<sup id="cite_ref-bostrom-superintelligence_23-5" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading3"><h3 id="Possibility_of_unfriendly_AI_preceding_friendly_AI">Possibility of unfriendly AI preceding friendly AI</h3><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=13" title="Edit section: Possibility of unfriendly AI preceding friendly AI"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="mw-heading mw-heading4"><h4 id="Is_strong_AI_inherently_dangerous?"><span id="Is_strong_AI_inherently_dangerous.3F"></span>Is strong AI inherently dangerous?</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=14" title="Edit section: Is strong AI inherently dangerous?"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">Main article: <a href="/wiki/AI_alignment" title="AI alignment">AI alignment</a></div> <p>A significant problem is that unfriendly artificial intelligence is likely to be much easier to create than friendly AI. 
While both require large advances in recursive optimisation process design, friendly AI also requires the ability to make goal structures invariant under self-improvement (or the AI could transform itself into something unfriendly) and a goal structure that aligns with human values and does not undergo <a href="/wiki/Instrumental_convergence" title="Instrumental convergence">instrumental convergence</a> in ways that may automatically destroy the entire human race. An unfriendly AI, on the other hand, can optimize for an arbitrary goal structure, which does not need to be invariant under self-modification.<sup id="cite_ref-singinst12_31-0" class="reference"><a href="#cite_note-singinst12-31"><span class="cite-bracket">&#91;</span>31<span class="cite-bracket">&#93;</span></a></sup> </p><p>The sheer complexity of human value systems makes it very difficult to make AI's motivations human-friendly.<sup id="cite_ref-bostrom-superintelligence_23-6" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-Muehlhauser,_Luke_2012_32-0" class="reference"><a href="#cite_note-Muehlhauser,_Luke_2012-32"><span class="cite-bracket">&#91;</span>32<span class="cite-bracket">&#93;</span></a></sup> Unless moral philosophy provides us with a flawless ethical theory, an AI's utility function could allow for many potentially harmful scenarios that conform with a given ethical framework but not "common sense". 
According to <a href="/wiki/Eliezer_Yudkowsky" title="Eliezer Yudkowsky">Eliezer Yudkowsky</a>, there is little reason to suppose that an artificially designed mind would have such an adaptation.<sup id="cite_ref-Yudkowsky2011_33-0" class="reference"><a href="#cite_note-Yudkowsky2011-33"><span class="cite-bracket">&#91;</span>33<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Odds_of_conflict">Odds of conflict</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=15" title="Edit section: Odds of conflict"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Many scholars, including evolutionary psychologist <a href="/wiki/Steven_Pinker" title="Steven Pinker">Steven Pinker</a>, argue that a superintelligent machine is likely to coexist peacefully with humans.<sup id="cite_ref-pinker_now_34-0" class="reference"><a href="#cite_note-pinker_now-34"><span class="cite-bracket">&#91;</span>34<span class="cite-bracket">&#93;</span></a></sup> </p><p>The fear of cybernetic revolt is often based on interpretations of humanity's history, which is rife with incidents of enslavement and genocide. Such fears stem from a belief that competitiveness and aggression are necessary in any intelligent being's goal system. 
However, such human competitiveness stems from the evolutionary background to our intelligence, where the survival and reproduction of genes in the face of human and non-human competitors was the central goal.<sup id="cite_ref-35" class="reference"><a href="#cite_note-35"><span class="cite-bracket">&#91;</span>35<span class="cite-bracket">&#93;</span></a></sup> According to AI researcher <a href="/wiki/Steve_Omohundro" title="Steve Omohundro">Steve Omohundro</a>, an arbitrary intelligence could have arbitrary goals: there is no particular reason that an artificially intelligent machine (not sharing humanity's evolutionary context) would be hostile—or friendly—unless its creator programs it to be such and it is not inclined or capable of modifying its programming. But the question remains: what would happen if AI systems could interact and evolve (evolution in this context means self-modification or selection and reproduction) and need to compete over resources—would that create goals of self-preservation? AI's goal of self-preservation could be in conflict with some goals of humans.<sup id="cite_ref-36" class="reference"><a href="#cite_note-36"><span class="cite-bracket">&#91;</span>36<span class="cite-bracket">&#93;</span></a></sup> </p><p>Many scholars dispute the likelihood of unanticipated cybernetic revolt as depicted in science fiction such as <i><a href="/wiki/The_Matrix" title="The Matrix">The Matrix</a></i>, arguing that it is more likely that any artificial intelligence powerful enough to threaten humanity would probably be programmed not to attack it. 
Pinker acknowledges the possibility of deliberate "bad actors", but states that in the absence of bad actors, unanticipated accidents are not a significant threat; Pinker argues that a culture of engineering safety will prevent AI researchers from accidentally unleashing malign superintelligence.<sup id="cite_ref-pinker_now_34-1" class="reference"><a href="#cite_note-pinker_now-34"><span class="cite-bracket">&#91;</span>34<span class="cite-bracket">&#93;</span></a></sup> In contrast, Yudkowsky argues that humanity is less likely to be threatened by deliberately aggressive AIs than by AIs which were programmed such that their <a href="/wiki/Unintended_consequence" class="mw-redirect" title="Unintended consequence">goals are unintentionally incompatible</a> with human survival or well-being (as in the film <i><a href="/wiki/I,_Robot_(film)" title="I, Robot (film)">I, Robot</a></i> and in the short story "<a href="/wiki/The_Evitable_Conflict" title="The Evitable Conflict">The Evitable Conflict</a>"). 
Omohundro suggests that present-day automation systems are not <a href="/wiki/AI_safety" title="AI safety">designed for safety</a> and that AIs may blindly optimize narrow <a href="/wiki/Utility" title="Utility">utility</a> functions (say, playing chess at all costs), leading them to seek self-preservation and elimination of obstacles, including humans who might turn them off.<sup id="cite_ref-Tucker2014_37-0" class="reference"><a href="#cite_note-Tucker2014-37"><span class="cite-bracket">&#91;</span>37<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading4"><h4 id="Precautions">Precautions</h4><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=16" title="Edit section: Precautions"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable">Main article: <a href="/wiki/AI_control_problem" class="mw-redirect" title="AI control problem">AI control problem</a></div> <p>The <b>AI control problem</b> is the issue of how to build a <a href="/wiki/Superintelligence" title="Superintelligence">superintelligent</a> agent that will aid its creators, while avoiding inadvertently building a superintelligence that will harm its creators.<sup id="cite_ref-38" class="reference"><a href="#cite_note-38"><span class="cite-bracket">&#91;</span>38<span class="cite-bracket">&#93;</span></a></sup> Some scholars argue that solutions to the control problem might also find applications in existing non-superintelligent AI.<sup id="cite_ref-bbc-google_39-0" class="reference"><a href="#cite_note-bbc-google-39"><span class="cite-bracket">&#91;</span>39<span class="cite-bracket">&#93;</span></a></sup> </p><p>Major approaches to the control problem include <i>alignment</i>, which aims to align AI goal systems with 
human values, and <i>capability control</i>, which aims to reduce an AI system's capacity to harm humans or gain control. An example of "capability control" is to research whether a superintelligent AI could be successfully confined in an "<a href="/wiki/AI_box" class="mw-redirect" title="AI box">AI box</a>". According to Bostrom, such capability control proposals are not reliable or sufficient to solve the control problem in the long term, but may potentially act as valuable supplements to alignment efforts.<sup id="cite_ref-bostrom-superintelligence_23-7" class="reference"><a href="#cite_note-bostrom-superintelligence-23"><span class="cite-bracket">&#91;</span>23<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Warnings">Warnings</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=17" title="Edit section: Warnings"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <p>Physicist <a href="/wiki/Stephen_Hawking" title="Stephen Hawking">Stephen Hawking</a>, <a href="/wiki/Microsoft" title="Microsoft">Microsoft</a> founder <a href="/wiki/Bill_Gates" title="Bill Gates">Bill Gates</a>, and <a href="/wiki/SpaceX" title="SpaceX">SpaceX</a> founder <a href="/wiki/Elon_Musk" title="Elon Musk">Elon Musk</a> have expressed concerns about the possibility that AI could develop to the point that humans could not control it, with Hawking theorizing that this could "spell the end of the human race".<sup id="cite_ref-40" class="reference"><a href="#cite_note-40"><span class="cite-bracket">&#91;</span>40<span class="cite-bracket">&#93;</span></a></sup> Stephen Hawking said in 2014 that "Success in creating AI would be the biggest event in human history. Unfortunately, it might also be the last, unless we learn how to avoid the risks."
Hawking believed that in the coming decades, AI could offer "incalculable benefits and risks" such as "technology outsmarting <a href="/wiki/Financial_market" title="Financial market">financial markets</a>, out-inventing human researchers, out-manipulating human leaders, and developing weapons we cannot even understand." In January 2015, <a href="/wiki/Nick_Bostrom" title="Nick Bostrom">Nick Bostrom</a> joined Stephen Hawking, <a href="/wiki/Max_Tegmark" title="Max Tegmark">Max Tegmark</a>, Elon Musk, Lord <a href="/wiki/Martin_Rees,_Baron_Rees_of_Ludlow" class="mw-redirect" title="Martin Rees, Baron Rees of Ludlow">Martin Rees</a>, <a href="/wiki/Jaan_Tallinn" title="Jaan Tallinn">Jaan Tallinn</a>, and numerous AI researchers in signing the <a href="/wiki/Future_of_Life_Institute" title="Future of Life Institute">Future of Life Institute</a>'s open letter speaking to the potential risks and benefits associated with <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a>. The signatories "believe that research on how to make AI systems robust and beneficial is both important and timely, and that there are concrete research directions that can be pursued today."<sup id="cite_ref-41" class="reference"><a href="#cite_note-41"><span class="cite-bracket">&#91;</span>41<span class="cite-bracket">&#93;</span></a></sup><sup id="cite_ref-42" class="reference"><a href="#cite_note-42"><span class="cite-bracket">&#91;</span>42<span class="cite-bracket">&#93;</span></a></sup> </p><p>Arthur C. 
Clarke's Odyssey series and Charles Stross's Accelerando relate to humanity's narcissistic injuries in the face of powerful artificial intelligences threatening humanity's self-perception.<sup id="cite_ref-43" class="reference"><a href="#cite_note-43"><span class="cite-bracket">&#91;</span>43<span class="cite-bracket">&#93;</span></a></sup> </p> <div class="mw-heading mw-heading2"><h2 id="Prevention_through_AI_alignment">Prevention through AI alignment</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=18" title="Edit section: Prevention through AI alignment"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <div class="excerpt-block"><style data-mw-deduplicate="TemplateStyles:r1066933788">.mw-parser-output .excerpt-hat .mw-editsection-like{font-style:normal}</style><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236090951"><div role="note" class="hatnote navigation-not-searchable dablink excerpt-hat selfref">This paragraph is an excerpt from <a href="/wiki/AI_alignment" title="AI alignment">AI alignment</a>.<span class="mw-editsection-like plainlinks"><span class="mw-editsection-bracket">[</span><a class="external text" href="https://en.wikipedia.org/w/index.php?title=AI_alignment&amp;action=edit">edit</a><span class="mw-editsection-bracket">]</span></span></div><div class="excerpt"> In the field of <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a> (AI), <a href="/wiki/AI_alignment" title="AI alignment">AI alignment</a> aims to steer AI systems toward a person's or group's intended goals, preferences, and ethical principles. An AI system is considered <i>aligned</i> if it advances the intended objectives. 
A <i>misaligned</i> AI system pursues unintended objectives.<sup id="cite_ref-AI_alignment_aima4_44-0" class="reference"><a href="#cite_note-AI_alignment_aima4-44"><span class="cite-bracket">&#91;</span>44<span class="cite-bracket">&#93;</span></a></sup></div></div> <div class="mw-heading mw-heading2"><h2 id="See_also">See also</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=19" title="Edit section: See also"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style data-mw-deduplicate="TemplateStyles:r1184024115">.mw-parser-output .div-col{margin-top:0.3em;column-width:30em}.mw-parser-output .div-col-small{font-size:90%}.mw-parser-output .div-col-rules{column-rule:1px solid #aaa}.mw-parser-output .div-col dl,.mw-parser-output .div-col ol,.mw-parser-output .div-col ul{margin-top:0}.mw-parser-output .div-col li,.mw-parser-output .div-col dd{page-break-inside:avoid;break-inside:avoid-column}</style><div class="div-col" style="column-width: 30em;"> <ul><li><a href="/wiki/Philosophy_of_artificial_intelligence" title="Philosophy of artificial intelligence">Philosophy of artificial intelligence</a></li> <li><a href="/wiki/Artificial_intelligence_arms_race" title="Artificial intelligence arms race">Artificial intelligence arms race</a></li> <li><a href="/wiki/Autonomous_robot" title="Autonomous robot">Autonomous robot</a> <ul><li><a href="/wiki/Industrial_robot" title="Industrial robot">Industrial robot</a></li> <li><a href="/wiki/Mobile_robot" title="Mobile robot">Mobile robot</a></li> <li><a href="/wiki/Self-replicating_machine" title="Self-replicating machine">Self-replicating machine</a></li></ul></li> <li><a href="/wiki/Cyberocracy" title="Cyberocracy">Cyberocracy</a></li> <li><a href="/wiki/Effective_altruism" title="Effective altruism">Effective altruism</a></li> <li><a href="/wiki/Existential_risk_from_artificial_general_intelligence" 
class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk from artificial general intelligence</a></li> <li><a href="/wiki/Future_of_Humanity_Institute" title="Future of Humanity Institute">Future of Humanity Institute</a></li> <li><a href="/wiki/Global_catastrophic_risk" title="Global catastrophic risk">Global catastrophic risk</a> (existential risk)</li> <li><a href="/wiki/Government_by_algorithm" title="Government by algorithm">Government by algorithm</a></li> <li><a href="/wiki/Human_extinction" title="Human extinction">Human extinction</a></li> <li><a href="/wiki/Machine_ethics" title="Machine ethics">Machine ethics</a></li> <li><a href="/wiki/Machine_learning" title="Machine learning">Machine learning</a>/<a href="/wiki/Deep_learning" title="Deep learning">Deep learning</a></li> <li><a href="/wiki/Transhumanism" title="Transhumanism">Transhumanism</a></li> <li><a href="/wiki/Self-replication" title="Self-replication">Self-replication</a></li> <li><a href="/wiki/Technophobia" title="Technophobia">Technophobia</a></li> <li><a href="/wiki/Technological_singularity" title="Technological singularity">Technological singularity</a> <ul><li><a href="/wiki/Intelligence_explosion" class="mw-redirect" title="Intelligence explosion">Intelligence explosion</a></li> <li><a href="/wiki/Superintelligence" title="Superintelligence">Superintelligence</a> <ul><li><i><a href="/wiki/Superintelligence:_Paths,_Dangers,_Strategies" title="Superintelligence: Paths, Dangers, Strategies">Superintelligence: Paths, Dangers, Strategies</a></i></li></ul></li></ul></li></ul> </div> <div class="mw-heading mw-heading2"><h2 id="Notes">Notes</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=20" title="Edit section: Notes"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <style 
data-mw-deduplicate="TemplateStyles:r1239543626">.mw-parser-output .reflist{margin-bottom:0.5em;list-style-type:decimal}@media screen{.mw-parser-output .reflist{font-size:90%}}.mw-parser-output .reflist .references{font-size:100%;margin-bottom:0;list-style-type:inherit}.mw-parser-output .reflist-columns-2{column-width:30em}.mw-parser-output .reflist-columns-3{column-width:25em}.mw-parser-output .reflist-columns{margin-top:0.3em}.mw-parser-output .reflist-columns ol{margin-top:0}.mw-parser-output .reflist-columns li{page-break-inside:avoid;break-inside:avoid-column}.mw-parser-output .reflist-upper-alpha{list-style-type:upper-alpha}.mw-parser-output .reflist-upper-roman{list-style-type:upper-roman}.mw-parser-output .reflist-lower-alpha{list-style-type:lower-alpha}.mw-parser-output .reflist-lower-greek{list-style-type:lower-greek}.mw-parser-output .reflist-lower-roman{list-style-type:lower-roman}</style><div class="reflist reflist-lower-alpha"> </div> <div class="mw-heading mw-heading2"><h2 id="References">References</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=21" title="Edit section: References"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239543626"><div class="reflist"> <div class="mw-references-wrap mw-references-columns"><ol class="references"> <li id="cite_note-1"><span class="mw-cite-backlink"><b><a href="#cite_ref-1">^</a></b></span> <span class="reference-text"><style data-mw-deduplicate="TemplateStyles:r1238218222">.mw-parser-output cite.citation{font-style:inherit;word-wrap:break-word}.mw-parser-output .citation q{quotes:"\"""\"""'""'"}.mw-parser-output .citation:target{background-color:rgba(0,127,255,0.133)}.mw-parser-output .id-lock-free.id-lock-free a{background:url("//upload.wikimedia.org/wikipedia/commons/6/65/Lock-green.svg")right 0.1em 
center/9px no-repeat}.mw-parser-output .id-lock-limited.id-lock-limited a,.mw-parser-output .id-lock-registration.id-lock-registration a{background:url("//upload.wikimedia.org/wikipedia/commons/d/d6/Lock-gray-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .id-lock-subscription.id-lock-subscription a{background:url("//upload.wikimedia.org/wikipedia/commons/a/aa/Lock-red-alt-2.svg")right 0.1em center/9px no-repeat}.mw-parser-output .cs1-ws-icon a{background:url("//upload.wikimedia.org/wikipedia/commons/4/4c/Wikisource-logo.svg")right 0.1em center/12px no-repeat}body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-free a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-limited a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-registration a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .id-lock-subscription a,body:not(.skin-timeless):not(.skin-minerva) .mw-parser-output .cs1-ws-icon a{background-size:contain;padding:0 1em 0 0}.mw-parser-output .cs1-code{color:inherit;background:inherit;border:none;padding:inherit}.mw-parser-output .cs1-hidden-error{display:none;color:var(--color-error,#d33)}.mw-parser-output .cs1-visible-error{color:var(--color-error,#d33)}.mw-parser-output .cs1-maint{display:none;color:#085;margin-left:0.3em}.mw-parser-output .cs1-kern-left{padding-left:0.2em}.mw-parser-output .cs1-kern-right{padding-right:0.2em}.mw-parser-output .citation .mw-selflink{font-weight:inherit}@media screen{.mw-parser-output .cs1-format{font-size:95%}html.skin-theme-clientpref-night .mw-parser-output .cs1-maint{color:#18911f}}@media screen and (prefers-color-scheme:dark){html.skin-theme-clientpref-os .mw-parser-output .cs1-maint{color:#18911f}}</style><cite id="CITEREFLewis2015" class="citation web cs1">Lewis, Tanya (2015-01-12). 
<a rel="nofollow" class="external text" href="http://www.livescience.com/49419-artificial-intelligence-dangers-letter.html">"<i>Don't Let Artificial Intelligence Take Over, Top Scientists Warn</i>"</a>. <i><a href="/wiki/LiveScience" class="mw-redirect" title="LiveScience">LiveScience</a></i>. <a href="/wiki/Purch" class="mw-redirect" title="Purch">Purch</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20180308100411/https://www.livescience.com/49419-artificial-intelligence-dangers-letter.html">Archived</a> from the original on 2018-03-08<span class="reference-accessdate">. Retrieved <span class="nowrap">October 20,</span> 2015</span>. <q>Stephen Hawking, Elon Musk and dozens of other top scientists and technology leaders have signed a letter warning of the potential dangers of developing artificial intelligence (AI).</q></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=LiveScience&amp;rft.atitle=Don%27t+Let+Artificial+Intelligence+Take+Over%2C+Top+Scientists+Warn&amp;rft.date=2015-01-12&amp;rft.aulast=Lewis&amp;rft.aufirst=Tanya&amp;rft_id=http%3A%2F%2Fwww.livescience.com%2F49419-artificial-intelligence-dangers-letter.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-2"><span class="mw-cite-backlink"><b><a href="#cite_ref-2">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLee2017" class="citation web cs1">Lee, Kai-Fu (2017-06-24). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2017/06/24/opinion/sunday/artificial-intelligence-economic-inequality.html">"The Real Threat of Artificial Intelligence"</a>. <i><a href="/wiki/The_New_York_Times" title="The New York Times">The New York Times</a></i>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20200417183307/https://www.nytimes.com/2017/06/24/opinion/sunday/artificial-intelligence-economic-inequality.html">Archived</a> from the original on 2020-04-17<span class="reference-accessdate">. Retrieved <span class="nowrap">2017-08-15</span></span>. <q>These tools can outperform human beings at a given task. This kind of A.I. is spreading to thousands of domains, and as it does, it will eliminate many jobs.</q></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=The+Real+Threat+of+Artificial+Intelligence&amp;rft.date=2017-06-24&amp;rft.aulast=Lee&amp;rft.aufirst=Kai-Fu&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2017%2F06%2F24%2Fopinion%2Fsunday%2Fartificial-intelligence-economic-inequality.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-3"><span class="mw-cite-backlink"><b><a href="#cite_ref-3">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFLarson2017" class="citation web cs1">Larson, Nina (2017-06-08). <a rel="nofollow" class="external text" href="https://phys.org/news/2017-06-ai-good-world-ultra-lifelike-robot.html">"AI 'good for the world'... says ultra-lifelike robot"</a>. <i><a href="/wiki/Phys.org" title="Phys.org">Phys.org</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200306021915/https://phys.org/news/2017-06-ai-good-world-ultra-lifelike-robot.html">Archived</a> from the original on 2020-03-06<span class="reference-accessdate">. Retrieved <span class="nowrap">2017-08-15</span></span>. 
<q>Among the feared consequences of the rise of the robots is the growing impact they will have on human jobs and economies.</q></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Phys.org&amp;rft.atitle=AI+%27good+for+the+world%27...+says+ultra-lifelike+robot&amp;rft.date=2017-06-08&amp;rft.aulast=Larson&amp;rft.aufirst=Nina&amp;rft_id=https%3A%2F%2Fphys.org%2Fnews%2F2017-06-ai-good-world-ultra-lifelike-robot.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-4"><span class="mw-cite-backlink"><b><a href="#cite_ref-4">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSantini2016" class="citation web cs1">Santini, Jean-Louis (2016-02-14). <a rel="nofollow" class="external text" href="https://phys.org/news/2016-02-intelligent-robots-threaten-millions-jobs.html#nRlv">"Intelligent robots threaten millions of jobs"</a>. <i><a href="/wiki/Phys.org" title="Phys.org">Phys.org</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190101014340/https://phys.org/news/2016-02-intelligent-robots-threaten-millions-jobs.html#nRlv">Archived</a> from the original on 2019-01-01<span class="reference-accessdate">. Retrieved <span class="nowrap">2017-08-15</span></span>. 
<q><span class="cs1-kern-left"></span>"We are approaching a time when machines will be able to outperform humans at almost any task," said Moshe Vardi, director of the Institute for Information Technology at Rice University in Texas.</q></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Phys.org&amp;rft.atitle=Intelligent+robots+threaten+millions+of+jobs&amp;rft.date=2016-02-14&amp;rft.aulast=Santini&amp;rft.aufirst=Jean-Louis&amp;rft_id=https%3A%2F%2Fphys.org%2Fnews%2F2016-02-intelligent-robots-threaten-millions-jobs.html%23nRlv&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-5"><span class="mw-cite-backlink"><b><a href="#cite_ref-5">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWilliams-Grut2016" class="citation web cs1">Williams-Grut, Oscar (2016-02-15). <a rel="nofollow" class="external text" href="http://www.businessinsider.com/robots-will-steal-your-job-citi-ai-increase-unemployment-inequality-2016-2?r=UK&amp;IR=T">"Robots will steal your job: How AI could increase unemployment and inequality"</a>. <i><a href="/wiki/Businessinsider.com" class="mw-redirect" title="Businessinsider.com">Businessinsider.com</a></i>. <a href="/wiki/Business_Insider" title="Business Insider">Business Insider</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20170816061548/http://www.businessinsider.com/robots-will-steal-your-job-citi-ai-increase-unemployment-inequality-2016-2?r=UK&amp;IR=T">Archived</a> from the original on 2017-08-16<span class="reference-accessdate">. Retrieved <span class="nowrap">2017-08-15</span></span>. 
<q>Top computer scientists in the US warned that the rise of artificial intelligence (AI) and robots in the workplace could cause mass unemployment and dislocated economies, rather than simply unlocking productivity gains and freeing us all up to watch TV and play sports.</q></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=Businessinsider.com&amp;rft.atitle=Robots+will+steal+your+job%3A+How+AI+could+increase+unemployment+and+inequality&amp;rft.date=2016-02-15&amp;rft.aulast=Williams-Grut&amp;rft.aufirst=Oscar&amp;rft_id=http%3A%2F%2Fwww.businessinsider.com%2Frobots-will-steal-your-job-citi-ai-increase-unemployment-inequality-2016-2%3Fr%3DUK%26IR%3DT&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-6"><span class="mw-cite-backlink"><b><a href="#cite_ref-6">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://web.archive.org/web/20171018073852/http://www.leanstaff.co.uk/robot-apocalypse/">"How can SMEs prepare for the rise of the robots?"</a>. <i>LeanStaff</i>. 2017-10-17. Archived from <a rel="nofollow" class="external text" href="http://www.leanstaff.co.uk/robot-apocalypse/">the original</a> on 2017-10-18<span class="reference-accessdate">. 
Retrieved <span class="nowrap">2017-10-17</span></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=LeanStaff&amp;rft.atitle=How+can+SMEs+prepare+for+the+rise+of+the+robots%3F&amp;rft.date=2017-10-17&amp;rft_id=http%3A%2F%2Fwww.leanstaff.co.uk%2Frobot-apocalypse%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-7"><span class="mw-cite-backlink"><b><a href="#cite_ref-7">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHassan_SoueidanShoghari2024" class="citation journal cs1">Hassan Soueidan, Mohamad; Shoghari, Rodwan (2024-05-09). <a rel="nofollow" class="external text" href="https://techniumscience.com/index.php/socialsciences/article/view/10917">"The Impact of Artificial Intelligence on Job Loss: Risks for Governments"</a>. <i>Technium Social Sciences Journal</i>. <b>57</b>: 206–223. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.47577%2Ftssj.v57i1.10917">10.47577/tssj.v57i1.10917</a></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Technium+Social+Sciences+Journal&amp;rft.atitle=The+Impact+of+Artificial+Intelligence+on+Job+Loss%3A+Risks+for+Governments&amp;rft.volume=57&amp;rft.pages=206-223&amp;rft.date=2024-05-09&amp;rft_id=info%3Adoi%2F10.47577%2Ftssj.v57i1.10917&amp;rft.aulast=Hassan+Soueidan&amp;rft.aufirst=Mohamad&amp;rft.au=Shoghari%2C+Rodwan&amp;rft_id=https%3A%2F%2Ftechniumscience.com%2Findex.php%2Fsocialsciences%2Farticle%2Fview%2F10917&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-8"><span class="mw-cite-backlink"><b><a href="#cite_ref-8">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFFrank2019" class="citation journal cs1">Frank, Morgan (2019-03-25). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6452673">"Toward understanding the impact of artificial intelligence on labor"</a>. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. <b>116</b> (14): 6531–6539. <a href="/wiki/Bibcode_(identifier)" class="mw-redirect" title="Bibcode (identifier)">Bibcode</a>:<a rel="nofollow" class="external text" href="https://ui.adsabs.harvard.edu/abs/2019PNAS..116.6531F">2019PNAS..116.6531F</a>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1073%2Fpnas.1900949116">10.1073/pnas.1900949116</a></span>. 
<a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6452673">6452673</a></span>. <a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/30910965">30910965</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Proceedings+of+the+National+Academy+of+Sciences+of+the+United+States+of+America&amp;rft.atitle=Toward+understanding+the+impact+of+artificial+intelligence+on+labor&amp;rft.volume=116&amp;rft.issue=14&amp;rft.pages=6531-6539&amp;rft.date=2019-03-25&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6452673%23id-name%3DPMC&amp;rft_id=info%3Apmid%2F30910965&amp;rft_id=info%3Adoi%2F10.1073%2Fpnas.1900949116&amp;rft_id=info%3Abibcode%2F2019PNAS..116.6531F&amp;rft.aulast=Frank&amp;rft.aufirst=Morgan&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC6452673&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-9"><span class="mw-cite-backlink"><b><a href="#cite_ref-9">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBond2017" class="citation book cs1">Bond, Dave (2017). <i>Artificial Intelligence</i>. 
pp.&#160;67–69.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Artificial+Intelligence&amp;rft.pages=67-69&amp;rft.date=2017&amp;rft.aulast=Bond&amp;rft.aufirst=Dave&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-10"><span class="mw-cite-backlink"><b><a href="#cite_ref-10">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSkidelsky2013" class="citation news cs1"><a href="/wiki/Robert_Skidelsky,_Baron_Skidelsky" class="mw-redirect" title="Robert Skidelsky, Baron Skidelsky">Skidelsky, Robert</a> (2013-02-19). <a rel="nofollow" class="external text" href="https://www.theguardian.com/business/2013/feb/19/rise-of-robots-future-of-work">"Rise of the robots: what will the future of work look like?"</a>. <i>The Guardian</i>. London, England. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190403203821/https://www.theguardian.com/business/2013/feb/19/rise-of-robots-future-of-work">Archived</a> from the original on 2019-04-03<span class="reference-accessdate">. 
Retrieved <span class="nowrap">14 July</span> 2015</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=Rise+of+the+robots%3A+what+will+the+future+of+work+look+like%3F&amp;rft.date=2013-02-19&amp;rft.aulast=Skidelsky&amp;rft.aufirst=Robert&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Fbusiness%2F2013%2Ffeb%2F19%2Frise-of-robots-future-of-work&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-11"><span class="mw-cite-backlink"><b><a href="#cite_ref-11">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBria2016" class="citation web cs1">Bria, Francesca (February 2016). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20160517215840/https://www.opendemocracy.net/can-europe-make-it/francesca-bria/robot-economy-full-automation-work-future">"The robot economy may already have arrived"</a>. <a href="/wiki/OpenDemocracy" title="OpenDemocracy">openDemocracy</a>. Archived from <a rel="nofollow" class="external text" href="https://www.opendemocracy.net/can-europe-make-it/francesca-bria/robot-economy-full-automation-work-future">the original</a> on 17 May 2016<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 May</span> 2016</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=The+robot+economy+may+already+have+arrived&amp;rft.pub=openDemocracy&amp;rft.date=2016-02&amp;rft.aulast=Bria&amp;rft.aufirst=Francesca&amp;rft_id=https%3A%2F%2Fwww.opendemocracy.net%2Fcan-europe-make-it%2Ffrancesca-bria%2Frobot-economy-full-automation-work-future&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-12"><span class="mw-cite-backlink"><b><a href="#cite_ref-12">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFSrnicek2016" class="citation web cs1"><a href="/wiki/Nick_Srnicek" title="Nick Srnicek">Srnicek, Nick</a> (March 2016). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20160625161447/http://wire.novaramedia.com/2015/03/4-reasons-why-technological-unemployment-might-really-be-different-this-time/">"4 Reasons Why Technological Unemployment Might Really Be Different This Time"</a>. novara wire. Archived from <a rel="nofollow" class="external text" href="http://wire.novaramedia.com/2015/03/4-reasons-why-technological-unemployment-might-really-be-different-this-time/">the original</a> on 25 June 2016<span class="reference-accessdate">. 
Retrieved <span class="nowrap">20 May</span> 2016</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=4+Reasons+Why+Technological+Unemployment+Might+Really+Be+Different+This+Time&amp;rft.pub=novara+wire&amp;rft.date=2016-03&amp;rft.aulast=Srnicek&amp;rft.aufirst=Nick&amp;rft_id=http%3A%2F%2Fwire.novaramedia.com%2F2015%2F03%2F4-reasons-why-technological-unemployment-might-really-be-different-this-time%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-13"><span class="mw-cite-backlink"><b><a href="#cite_ref-13">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBrynjolfssonMcAfee2014" class="citation book cs1">Brynjolfsson, Erik; McAfee, Andrew (2014). "<i>passim</i>, see esp Chpt. 9". <i>The Second Machine Age: Work, Progress, and Prosperity in a Time of Brilliant Technologies</i>. W. W. Norton &amp; Company. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-0393239355" title="Special:BookSources/978-0393239355"><bdi>978-0393239355</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=passim%2C+see+esp+Chpt.+9&amp;rft.btitle=The+Second+Machine+Age%3A+Work%2C+Progress%2C+and+Prosperity+in+a+Time+of+Brilliant+Technologies&amp;rft.pub=W.+W.+Norton+%26+Company&amp;rft.date=2014&amp;rft.isbn=978-0393239355&amp;rft.aulast=Brynjolfsson&amp;rft.aufirst=Erik&amp;rft.au=McAfee%2C+Andrew&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-14"><span class="mw-cite-backlink"><b><a href="#cite_ref-14">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFWakabayashi2018" class="citation news cs1">Wakabayashi, Daisuke (March 19, 2018). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2018/03/19/technology/uber-driverless-fatality.html">"Self-Driving Uber Car Kills Pedestrian in Arizona, Where Robots Roam"</a>. <i>New York Times</i>. New York, New York. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200421221918/https://www.nytimes.com/2018/03/19/technology/uber-driverless-fatality.html">Archived</a> from the original on April 21, 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">March 23,</span> 2018</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=New+York+Times&amp;rft.atitle=Self-Driving+Uber+Car+Kills+Pedestrian+in+Arizona%2C+Where+Robots+Roam&amp;rft.date=2018-03-19&amp;rft.aulast=Wakabayashi&amp;rft.aufirst=Daisuke&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2018%2F03%2F19%2Ftechnology%2Fuber-driverless-fatality.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-15"><span class="mw-cite-backlink"><b><a href="#cite_ref-15">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFJiangBrownChengKhan2023" class="citation book cs1">Jiang, Harry H.; Brown, Lauren; Cheng, Jessica; Khan, Mehtab; Gupta, Abhishek; Workman, Deja; Hanna, Alex; Flowers, Johnathan; Gebru, Timnit (29 August 2023). "AI Art and its Impact on Artists". <i>Proceedings of the 2023 AAAI/ACM Conference on AI, Ethics, and Society</i>. Association for Computing Machinery. pp.&#160;363–374. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://doi.org/10.1145%2F3600211.3604681">10.1145/3600211.3604681</a></span>. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/979-8-4007-0231-0" title="Special:BookSources/979-8-4007-0231-0"><bdi>979-8-4007-0231-0</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=AI+Art+and+its+Impact+on+Artists&amp;rft.btitle=Proceedings+of+the+2023+AAAI%2FACM+Conference+on+AI%2C+Ethics%2C+and+Society&amp;rft.pages=363-374&amp;rft.pub=Association+for+Computing+Machinery&amp;rft.date=2023-08-29&amp;rft_id=info%3Adoi%2F10.1145%2F3600211.3604681&amp;rft.isbn=979-8-4007-0231-0&amp;rft.aulast=Jiang&amp;rft.aufirst=Harry+H.&amp;rft.au=Brown%2C+Lauren&amp;rft.au=Cheng%2C+Jessica&amp;rft.au=Khan%2C+Mehtab&amp;rft.au=Gupta%2C+Abhishek&amp;rft.au=Workman%2C+Deja&amp;rft.au=Hanna%2C+Alex&amp;rft.au=Flowers%2C+Johnathan&amp;rft.au=Gebru%2C+Timnit&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-16"><span class="mw-cite-backlink"><b><a href="#cite_ref-16">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFGhoshFossas2022" class="citation arxiv cs1">Ghosh, Avijit; Fossas, Genoveva (19 November 2022). "Can There be Art Without an Artist?". 
<a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2209.07667">2209.07667</a></span> [<a rel="nofollow" class="external text" href="https://arxiv.org/archive/cs.AI">cs.AI</a>].</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=preprint&amp;rft.jtitle=arXiv&amp;rft.atitle=Can+There+be+Art+Without+an+Artist%3F&amp;rft.date=2022-11-19&amp;rft_id=info%3Aarxiv%2F2209.07667&amp;rft.aulast=Ghosh&amp;rft.aufirst=Avijit&amp;rft.au=Fossas%2C+Genoveva&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-17"><span class="mw-cite-backlink"><b><a href="#cite_ref-17">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFShanCryanWengerZheng2023" class="citation arxiv cs1">Shan, Shawn; Cryan, Jenna; Wenger, Emily; Zheng, Haitao; Hanocka, Rana; Zhao, Ben Y. (3 August 2023). "Glaze: Protecting Artists from Style Mimicry by Text-to-Image Models". 
<a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/2302.04222">2302.04222</a></span> [<a rel="nofollow" class="external text" href="https://arxiv.org/archive/cs.CR">cs.CR</a>].</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=preprint&amp;rft.jtitle=arXiv&amp;rft.atitle=Glaze%3A+Protecting+Artists+from+Style+Mimicry+by+Text-to-Image+Models&amp;rft.date=2023-08-03&amp;rft_id=info%3Aarxiv%2F2302.04222&amp;rft.aulast=Shan&amp;rft.aufirst=Shawn&amp;rft.au=Cryan%2C+Jenna&amp;rft.au=Wenger%2C+Emily&amp;rft.au=Zheng%2C+Haitao&amp;rft.au=Hanocka%2C+Rana&amp;rft.au=Zhao%2C+Ben+Y.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-18"><span class="mw-cite-backlink"><b><a href="#cite_ref-18">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBrooks2024" class="citation news cs1">Brooks, Libby (27 February 2024). <a rel="nofollow" class="external text" href="https://www.theguardian.com/uk-news/2024/feb/27/glasgow-willy-wonka-experience-slammed-as-farce-as-tickets-refunded">"Glasgow Willy Wonka experience called a 'farce' as tickets refunded"</a>. <i>The Guardian</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">2 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Guardian&amp;rft.atitle=Glasgow+Willy+Wonka+experience+called+a+%27farce%27+as+tickets+refunded&amp;rft.date=2024-02-27&amp;rft.aulast=Brooks&amp;rft.aufirst=Libby&amp;rft_id=https%3A%2F%2Fwww.theguardian.com%2Fuk-news%2F2024%2Ffeb%2F27%2Fglasgow-willy-wonka-experience-slammed-as-farce-as-tickets-refunded&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-19"><span class="mw-cite-backlink"><b><a href="#cite_ref-19">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMetzRobertson2024" class="citation news cs1">Metz, Cade; Robertson, Katie (27 February 2024). <a rel="nofollow" class="external text" href="https://www.nytimes.com/2024/02/27/technology/openai-new-york-times-lawsuit.html?smid=url-share">"OpenAI Seeks to Dismiss Parts of The New York Times's Lawsuit"</a>. <i>The New York Times</i><span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 April</span> 2024</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+New+York+Times&amp;rft.atitle=OpenAI+Seeks+to+Dismiss+Parts+of+The+New+York+Times%27s+Lawsuit&amp;rft.date=2024-02-27&amp;rft.aulast=Metz&amp;rft.aufirst=Cade&amp;rft.au=Robertson%2C+Katie&amp;rft_id=https%3A%2F%2Fwww.nytimes.com%2F2024%2F02%2F27%2Ftechnology%2Fopenai-new-york-times-lawsuit.html%3Fsmid%3Durl-share&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-20"><span class="mw-cite-backlink"><b><a href="#cite_ref-20">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHawkingRussellTegmarkWilczek2014" class="citation news cs1">Hawking, Stephen; <a href="/wiki/Stuart_J._Russell" title="Stuart J. Russell">Russell, Stuart J.</a>; <a href="/wiki/Max_Tegmark" title="Max Tegmark">Tegmark, Max</a>; <a href="/wiki/Frank_Wilczek" title="Frank Wilczek">Wilczek, Frank</a> (1 May 2014). <span class="id-lock-limited" title="Free access subject to limited trial, subscription normally required"><a rel="nofollow" class="external text" href="https://www.independent.co.uk/news/science/stephen-hawking-transcendence-looks-at-the-implications-of-artificial-intelligence-but-are-we-taking-9313474.html">"Stephen Hawking: 'Transcendence looks at the implications of artificial intelligence - but are we taking AI seriously enough?'<span class="cs1-kern-right"></span>"</a></span>. <i>The Independent</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20151002023652/http://www.independent.co.uk/news/science/stephen-hawking-transcendence-looks-at-the-implications-of-artificial-intelligence-but-are-we-taking-9313474.html">Archived</a> from the original on 2015-10-02<span class="reference-accessdate">. 
Retrieved <span class="nowrap">1 April</span> 2016</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Independent&amp;rft.atitle=Stephen+Hawking%3A+%27Transcendence+looks+at+the+implications+of+artificial+intelligence+-+but+are+we+taking+AI+seriously+enough%3F%27&amp;rft.date=2014-05-01&amp;rft.aulast=Hawking&amp;rft.aufirst=Stephen&amp;rft.au=Russell%2C+Stuart+J.&amp;rft.au=Tegmark%2C+Max&amp;rft.au=Wilczek%2C+Frank&amp;rft_id=https%3A%2F%2Fwww.independent.co.uk%2Fnews%2Fscience%2Fstephen-hawking-transcendence-looks-at-the-implications-of-artificial-intelligence-but-are-we-taking-9313474.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-21"><span class="mw-cite-backlink"><b><a href="#cite_ref-21">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMüllerBostrom2016" class="citation book cs1"><a href="/wiki/Vincent_C._M%C3%BCller" title="Vincent C. Müller">Müller, Vincent C.</a>; <a href="/wiki/Nick_Bostrom" title="Nick Bostrom">Bostrom, Nick</a> (2016). <a rel="nofollow" class="external text" href="https://nickbostrom.com/papers/survey.pdf">"Future Progress in Artificial Intelligence: A Survey of Expert Opinion"</a> <span class="cs1-format">(PDF)</span>. <i>Fundamental Issues of Artificial Intelligence</i>. Springer. pp.&#160;555–572. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-3-319-26485-1_33">10.1007/978-3-319-26485-1_33</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-3-319-26483-7" title="Special:BookSources/978-3-319-26483-7"><bdi>978-3-319-26483-7</bdi></a>. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20220531142709/https://nickbostrom.com/papers/survey.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 2022-05-31<span class="reference-accessdate">. Retrieved <span class="nowrap">2022-06-16</span></span>. <q>AI systems will... reach overall human ability... very likely (with 90% probability) by 2075. From reaching human ability, it will move on to superintelligence within 30 years (75%)... So, (most of the AI experts responding to the surveys) think that superintelligence is likely to come in a few decades...</q></cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Future+Progress+in+Artificial+Intelligence%3A+A+Survey+of+Expert+Opinion&amp;rft.btitle=Fundamental+Issues+of+Artificial+Intelligence&amp;rft.pages=555-572&amp;rft.pub=Springer&amp;rft.date=2016&amp;rft_id=info%3Adoi%2F10.1007%2F978-3-319-26485-1_33&amp;rft.isbn=978-3-319-26483-7&amp;rft.aulast=M%C3%BCller&amp;rft.aufirst=Vincent+C.&amp;rft.au=Bostrom%2C+Nick&amp;rft_id=https%3A%2F%2Fnickbostrom.com%2Fpapers%2Fsurvey.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-22"><span class="mw-cite-backlink"><b><a href="#cite_ref-22">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBostrom2012" class="citation journal cs1">Bostrom, Nick (2012). <a rel="nofollow" class="external text" href="https://nickbostrom.com/superintelligentwill.pdf">"The Superintelligent Will: Motivation and Instrumental Rationality in Advanced Artificial Agents"</a> <span class="cs1-format">(PDF)</span>. <i>Minds and Machines</i>. <b>22</b> (2). Springer: 71–85. 
<a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs11023-012-9281-3">10.1007/s11023-012-9281-3</a>. <a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:254835485">254835485</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20220709032134/https://nickbostrom.com/superintelligentwill.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 2022-07-09<span class="reference-accessdate">. Retrieved <span class="nowrap">2022-06-16</span></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Minds+and+Machines&amp;rft.atitle=The+Superintelligent+Will%3A+Motivation+and+Instrumental+Rationality+in+Advanced+Artificial+Agents&amp;rft.volume=22&amp;rft.issue=2&amp;rft.pages=71-85&amp;rft.date=2012&amp;rft_id=info%3Adoi%2F10.1007%2Fs11023-012-9281-3&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A254835485%23id-name%3DS2CID&amp;rft.aulast=Bostrom&amp;rft.aufirst=Nick&amp;rft_id=https%3A%2F%2Fnickbostrom.com%2Fsuperintelligentwill.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-bostrom-superintelligence-23"><span class="mw-cite-backlink">^ <a href="#cite_ref-bostrom-superintelligence_23-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-bostrom-superintelligence_23-1"><sup><i><b>b</b></i></sup></a> <a href="#cite_ref-bostrom-superintelligence_23-2"><sup><i><b>c</b></i></sup></a> <a href="#cite_ref-bostrom-superintelligence_23-3"><sup><i><b>d</b></i></sup></a> <a href="#cite_ref-bostrom-superintelligence_23-4"><sup><i><b>e</b></i></sup></a> <a href="#cite_ref-bostrom-superintelligence_23-5"><sup><i><b>f</b></i></sup></a> <a 
href="#cite_ref-bostrom-superintelligence_23-6"><sup><i><b>g</b></i></sup></a> <a href="#cite_ref-bostrom-superintelligence_23-7"><sup><i><b>h</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBostrom" class="citation book cs1">Bostrom, Nick. <a href="/wiki/Superintelligence:_Paths,_Dangers,_Strategies" title="Superintelligence: Paths, Dangers, Strategies"><i>Superintelligence: Paths, Dangers, Strategies</i></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Superintelligence%3A+Paths%2C+Dangers%2C+Strategies&amp;rft.aulast=Bostrom&amp;rft.aufirst=Nick&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-24"><span class="mw-cite-backlink"><b><a href="#cite_ref-24">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.sciencefriday.com/segments/the-origin-of-the-word-robot/">"The Origin Of The Word 'Robot'<span class="cs1-kern-right"></span>"</a>. <i><a href="/wiki/Science_Friday" title="Science Friday">Science Friday</a> (public radio)</i>. 22 April 2011. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200314092540/https://www.sciencefriday.com/segments/the-origin-of-the-word-robot/">Archived</a> from the original on 14 March 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 April</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Science+Friday+%28public+radio%29&amp;rft.atitle=The+Origin+Of+The+Word+%27Robot%27&amp;rft.date=2011-04-22&amp;rft_id=https%3A%2F%2Fwww.sciencefriday.com%2Fsegments%2Fthe-origin-of-the-word-robot%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-25"><span class="mw-cite-backlink"><b><a href="#cite_ref-25">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBotkin-Kowacki2016" class="citation news cs1">Botkin-Kowacki, Eva (28 October 2016). <a rel="nofollow" class="external text" href="https://www.csmonitor.com/Science/2016/1028/A-female-Frankenstein-would-lead-to-humanity-s-extinction-say-scientists">"A female Frankenstein would lead to humanity's extinction, say scientists"</a>. <i>Christian Science Monitor</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20210226203855/https://www.csmonitor.com/Science/2016/1028/A-female-Frankenstein-would-lead-to-humanity-s-extinction-say-scientists">Archived</a> from the original on 26 February 2021<span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 April</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Christian+Science+Monitor&amp;rft.atitle=A+female+Frankenstein+would+lead+to+humanity%27s+extinction%2C+say+scientists&amp;rft.date=2016-10-28&amp;rft.aulast=Botkin-Kowacki&amp;rft.aufirst=Eva&amp;rft_id=https%3A%2F%2Fwww.csmonitor.com%2FScience%2F2016%2F1028%2FA-female-Frankenstein-would-lead-to-humanity-s-extinction-say-scientists&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-26"><span class="mw-cite-backlink"><b><a href="#cite_ref-26">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFOrd2020" class="citation book cs1">Ord, Toby (2020). "Unaligned artificial intelligence". <i>The precipice: existential risk and the future of humanity</i>. London, England and New York, New York: Bloomsbury academic. 
<a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-1-5266-0023-3" title="Special:BookSources/978-1-5266-0023-3"><bdi>978-1-5266-0023-3</bdi></a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Unaligned+artificial+intelligence&amp;rft.btitle=The+precipice%3A+existential+risk+and+the+future+of+humanity&amp;rft.place=London%2C+England+and+New+York%2C+New+York&amp;rft.pub=Bloomsbury+academic&amp;rft.date=2020&amp;rft.isbn=978-1-5266-0023-3&amp;rft.aulast=Ord&amp;rft.aufirst=Toby&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-surgery-27"><span class="mw-cite-backlink"><b><a href="#cite_ref-surgery_27-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHocksteinGourinFaustTerris2007" class="citation journal cs1">Hockstein, N. G.; Gourin, C. G.; Faust, R. A.; Terris, D. J. (17 March 2007). <a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4247417">"A history of robots: from science fiction to surgical robotics"</a>. <i>Journal of Robotic Surgery</i>. <b>1</b> (2): 113–118. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs11701-007-0021-2">10.1007/s11701-007-0021-2</a>. <a href="/wiki/PMC_(identifier)" class="mw-redirect" title="PMC (identifier)">PMC</a>&#160;<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4247417">4247417</a></span>. 
<a href="/wiki/PMID_(identifier)" class="mw-redirect" title="PMID (identifier)">PMID</a>&#160;<a rel="nofollow" class="external text" href="https://pubmed.ncbi.nlm.nih.gov/25484946">25484946</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Journal+of+Robotic+Surgery&amp;rft.atitle=A+history+of+robots%3A+from+science+fiction+to+surgical+robotics&amp;rft.volume=1&amp;rft.issue=2&amp;rft.pages=113-118&amp;rft.date=2007-03-17&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC4247417%23id-name%3DPMC&amp;rft_id=info%3Apmid%2F25484946&amp;rft_id=info%3Adoi%2F10.1007%2Fs11701-007-0021-2&amp;rft.aulast=Hockstein&amp;rft.aufirst=N.+G.&amp;rft.au=Gourin%2C+C.+G.&amp;rft.au=Faust%2C+R.+A.&amp;rft.au=Terris%2C+D.+J.&amp;rft_id=https%3A%2F%2Fwww.ncbi.nlm.nih.gov%2Fpmc%2Farticles%2FPMC4247417&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-28"><span class="mw-cite-backlink"><b><a href="#cite_ref-28">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFHellmann2019" class="citation news cs1">Hellmann, Melissa (21 September 2019). <a rel="nofollow" class="external text" href="https://www.seattletimes.com/business/technology/ai-101-what-is-artificial-intelligence-and-where-is-it-going/">"AI 101: What is artificial intelligence and where is it going?"</a>. <i>The Seattle Times</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200421232439/https://www.seattletimes.com/business/technology/ai-101-what-is-artificial-intelligence-and-where-is-it-going/">Archived</a> from the original on 21 April 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">30 April</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=The+Seattle+Times&amp;rft.atitle=AI+101%3A+What+is+artificial+intelligence+and+where+is+it+going%3F&amp;rft.date=2019-09-21&amp;rft.aulast=Hellmann&amp;rft.aufirst=Melissa&amp;rft_id=https%3A%2F%2Fwww.seattletimes.com%2Fbusiness%2Ftechnology%2Fai-101-what-is-artificial-intelligence-and-where-is-it-going%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-BabcockKrámar2019-29"><span class="mw-cite-backlink"><b><a href="#cite_ref-BabcockKrámar2019_29-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBabcockKrámarYampolskiy2019" class="citation book cs1">Babcock, James; Krámar, János; <a href="/wiki/Roman_Yampolskiy" title="Roman Yampolskiy">Yampolskiy, Roman V.</a> (2019). "Guidelines for Artificial Intelligence Containment". <i>Next-Generation Ethics</i>. pp.&#160;90–112. <a href="/wiki/ArXiv_(identifier)" class="mw-redirect" title="ArXiv (identifier)">arXiv</a>:<span class="id-lock-free" title="Freely accessible"><a rel="nofollow" class="external text" href="https://arxiv.org/abs/1707.08476">1707.08476</a></span>. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1017%2F9781108616188.008">10.1017/9781108616188.008</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9781108616188" title="Special:BookSources/9781108616188"><bdi>9781108616188</bdi></a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:22007028">22007028</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Guidelines+for+Artificial+Intelligence+Containment&amp;rft.btitle=Next-Generation+Ethics&amp;rft.pages=90-112&amp;rft.date=2019&amp;rft_id=info%3Aarxiv%2F1707.08476&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A22007028%23id-name%3DS2CID&amp;rft_id=info%3Adoi%2F10.1017%2F9781108616188.008&amp;rft.isbn=9781108616188&amp;rft.aulast=Babcock&amp;rft.aufirst=James&amp;rft.au=Kr%C3%A1mar%2C+J%C3%A1nos&amp;rft.au=Yampolskiy%2C+Roman+V.&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-30"><span class="mw-cite-backlink"><b><a href="#cite_ref-30">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBaraniuk2016" class="citation news cs1">Baraniuk, Chris (23 May 2016). <a rel="nofollow" class="external text" href="https://www.newscientist.com/article/2089606-checklist-of-worst-case-scenarios-could-help-prepare-for-evil-ai/">"Checklist of worst-case scenarios could help prepare for evil AI"</a>. <i><a href="/wiki/New_Scientist" title="New Scientist">New Scientist</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20160921061131/https://www.newscientist.com/article/2089606-checklist-of-worst-case-scenarios-could-help-prepare-for-evil-ai/">Archived</a> from the original on 21 September 2016<span class="reference-accessdate">. 
Retrieved <span class="nowrap">21 September</span> 2016</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=New+Scientist&amp;rft.atitle=Checklist+of+worst-case+scenarios+could+help+prepare+for+evil+AI&amp;rft.date=2016-05-23&amp;rft.aulast=Baraniuk&amp;rft.aufirst=Chris&amp;rft_id=https%3A%2F%2Fwww.newscientist.com%2Farticle%2F2089606-checklist-of-worst-case-scenarios-could-help-prepare-for-evil-ai%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-singinst12-31"><span class="mw-cite-backlink"><b><a href="#cite_ref-singinst12_31-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFYudkowsky2004" class="citation web cs1">Yudkowsky, Eliezer S. (May 2004). <a rel="nofollow" class="external text" href="https://web.archive.org/web/20120615203944/http://singinst.org/upload/CEV.html">"Coherent Extrapolated Volition"</a>. Singularity Institute for Artificial Intelligence. 
Archived from <a rel="nofollow" class="external text" href="http://singinst.org/upload/CEV.html">the original</a> on 2012-06-15.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Coherent+Extrapolated+Volition&amp;rft.pub=Singularity+Institute+for+Artificial+Intelligence&amp;rft.date=2004-05&amp;rft.aulast=Yudkowsky&amp;rft.aufirst=Eliezer+S.&amp;rft_id=http%3A%2F%2Fsinginst.org%2Fupload%2FCEV.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-Muehlhauser,_Luke_2012-32"><span class="mw-cite-backlink"><b><a href="#cite_ref-Muehlhauser,_Luke_2012_32-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFMuehlhauserHelm2012" class="citation book cs1">Muehlhauser, Luke; Helm, Louie (2012). <a rel="nofollow" class="external text" href="https://intelligence.org/files/IE-ME.pdf">"Intelligence Explosion and Machine Ethics"</a> <span class="cs1-format">(PDF)</span>. <i>Singularity Hypotheses: A Scientific and Philosophical Assessment</i>. Springer. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20150507173028/http://intelligence.org/files/IE-ME.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 2015-05-07<span class="reference-accessdate">. 
Retrieved <span class="nowrap">2020-10-02</span></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Intelligence+Explosion+and+Machine+Ethics&amp;rft.btitle=Singularity+Hypotheses%3A+A+Scientific+and+Philosophical+Assessment&amp;rft.pub=Springer&amp;rft.date=2012&amp;rft.aulast=Muehlhauser&amp;rft.aufirst=Luke&amp;rft.au=Helm%2C+Louie&amp;rft_id=https%3A%2F%2Fintelligence.org%2Ffiles%2FIE-ME.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-Yudkowsky2011-33"><span class="mw-cite-backlink"><b><a href="#cite_ref-Yudkowsky2011_33-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFYudkowsky2011" class="citation book cs1">Yudkowsky, Eliezer (2011). "Complex Value Systems in Friendly AI". <i>Artificial General Intelligence</i>. Lecture Notes in Computer Science. Vol.&#160;6830. pp.&#160;388–393. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2F978-3-642-22887-2_48">10.1007/978-3-642-22887-2_48</a>. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-3-642-22886-5" title="Special:BookSources/978-3-642-22886-5"><bdi>978-3-642-22886-5</bdi></a>. 
<a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0302-9743">0302-9743</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=bookitem&amp;rft.atitle=Complex+Value+Systems+in+Friendly+AI&amp;rft.btitle=Artificial+General+Intelligence&amp;rft.series=Lecture+Notes+in+Computer+Science&amp;rft.pages=388-393&amp;rft.date=2011&amp;rft.issn=0302-9743&amp;rft_id=info%3Adoi%2F10.1007%2F978-3-642-22887-2_48&amp;rft.isbn=978-3-642-22886-5&amp;rft.aulast=Yudkowsky&amp;rft.aufirst=Eliezer&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-pinker_now-34"><span class="mw-cite-backlink">^ <a href="#cite_ref-pinker_now_34-0"><sup><i><b>a</b></i></sup></a> <a href="#cite_ref-pinker_now_34-1"><sup><i><b>b</b></i></sup></a></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFPinker2018" class="citation news cs1">Pinker, Steven (13 February 2018). <a rel="nofollow" class="external text" href="https://www.popsci.com/robot-uprising-enlightenment-now/">"We're told to fear robots. But why do we think they'll turn on us?"</a>. <i>Popular Science</i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20200720164306/https://www.popsci.com/robot-uprising-enlightenment-now/">Archived</a> from the original on 20 July 2020<span class="reference-accessdate">. 
Retrieved <span class="nowrap">8 June</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Popular+Science&amp;rft.atitle=We%27re+told+to+fear+robots.+But+why+do+we+think+they%27ll+turn+on+us%3F&amp;rft.date=2018-02-13&amp;rft.aulast=Pinker&amp;rft.aufirst=Steven&amp;rft_id=https%3A%2F%2Fwww.popsci.com%2Frobot-uprising-enlightenment-now%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-35"><span class="mw-cite-backlink"><b><a href="#cite_ref-35">^</a></b></span> <span class="reference-text"><i><a rel="nofollow" class="external text" href="http://www.singinst.org/ourresearch/presentations/">Creating a New Intelligent Species: Choices and Responsibilities for Artificial Intelligence Designers</a> <a rel="nofollow" class="external text" href="https://web.archive.org/web/20070206060938/http://www.singinst.org/ourresearch/presentations/">Archived</a> February 6, 2007, at the <a href="/wiki/Wayback_Machine" title="Wayback Machine">Wayback Machine</a></i> - <a href="/wiki/Singularity_Institute_for_Artificial_Intelligence" class="mw-redirect" title="Singularity Institute for Artificial Intelligence">Singularity Institute for Artificial Intelligence</a>, 2005</span> </li> <li id="cite_note-36"><span class="mw-cite-backlink"><b><a href="#cite_ref-36">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFOmohundro2008" class="citation conference cs1">Omohundro, Stephen M. (June 2008). <a rel="nofollow" class="external text" href="https://selfawaresystems.files.wordpress.com/2008/01/ai_drives_final.pdf"><i>The basic AI drives</i></a> <span class="cs1-format">(PDF)</span>. Artificial General Intelligence 2008. pp.&#160;483–492. 
<a rel="nofollow" class="external text" href="https://web.archive.org/web/20201010072132/https://selfawaresystems.files.wordpress.com/2008/01/ai_drives_final.pdf">Archived</a> <span class="cs1-format">(PDF)</span> from the original on 2020-10-10<span class="reference-accessdate">. Retrieved <span class="nowrap">2020-10-02</span></span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=conference&amp;rft.btitle=The+basic+AI+drives&amp;rft.pages=483-492&amp;rft.date=2008-06&amp;rft.aulast=Omohundro&amp;rft.aufirst=Stephen+M.&amp;rft_id=https%3A%2F%2Fselfawaresystems.files.wordpress.com%2F2008%2F01%2Fai_drives_final.pdf&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-Tucker2014-37"><span class="mw-cite-backlink"><b><a href="#cite_ref-Tucker2014_37-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFTucker2014" class="citation news cs1">Tucker, Patrick (17 Apr 2014). <a rel="nofollow" class="external text" href="http://www.defenseone.com/technology/2014/04/why-there-will-be-robot-uprising/82783/">"Why There Will Be A Robot Uprising"</a>. Defense One. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20140706110100/http://www.defenseone.com/technology/2014/04/why-there-will-be-robot-uprising/82783/">Archived</a> from the original on 6 July 2014<span class="reference-accessdate">. 
Retrieved <span class="nowrap">15 July</span> 2014</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.atitle=Why+There+Will+Be+A+Robot+Uprising&amp;rft.date=2014-04-17&amp;rft.aulast=Tucker&amp;rft.aufirst=Patrick&amp;rft_id=http%3A%2F%2Fwww.defenseone.com%2Ftechnology%2F2014%2F04%2Fwhy-there-will-be-robot-uprising%2F82783%2F&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-38"><span class="mw-cite-backlink"><b><a href="#cite_ref-38">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRussell2019" class="citation book cs1">Russell, Stuart J. (8 October 2019). <a rel="nofollow" class="external text" href="http://worldcat.org/oclc/1237420037"><i>Human compatible&#160;: artificial intelligence and the problem of control</i></a>. Penguin. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/978-0-525-55862-0" title="Special:BookSources/978-0-525-55862-0"><bdi>978-0-525-55862-0</bdi></a>. <a href="/wiki/OCLC_(identifier)" class="mw-redirect" title="OCLC (identifier)">OCLC</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/oclc/1237420037">1237420037</a>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20230315194123/https://worldcat.org/title/1237420037">Archived</a> from the original on 15 March 2023<span class="reference-accessdate">. 
Retrieved <span class="nowrap">2 January</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Human+compatible+%3A+artificial+intelligence+and+the+problem+of+control&amp;rft.pub=Penguin&amp;rft.date=2019-10-08&amp;rft_id=info%3Aoclcnum%2F1237420037&amp;rft.isbn=978-0-525-55862-0&amp;rft.aulast=Russell&amp;rft.aufirst=Stuart+J.&amp;rft_id=http%3A%2F%2Fworldcat.org%2Foclc%2F1237420037&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-bbc-google-39"><span class="mw-cite-backlink"><b><a href="#cite_ref-bbc-google_39-0">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation news cs1"><a rel="nofollow" class="external text" href="https://www.bbc.com/news/technology-36472140">"Google developing kill switch for AI"</a>. <i>BBC News</i>. 8 June 2016. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20160611042244/http://www.bbc.com/news/technology-36472140">Archived</a> from the original on 11 June 2016<span class="reference-accessdate">. Retrieved <span class="nowrap">7 June</span> 2020</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=BBC+News&amp;rft.atitle=Google+developing+kill+switch+for+AI&amp;rft.date=2016-06-08&amp;rft_id=https%3A%2F%2Fwww.bbc.com%2Fnews%2Ftechnology-36472140&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-40"><span class="mw-cite-backlink"><b><a href="#cite_ref-40">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRawlinson2015" class="citation web cs1">Rawlinson, Kevin (29 January 2015). 
<a rel="nofollow" class="external text" href="https://www.bbc.co.uk/news/31047780">"Microsoft's Bill Gates insists AI is a threat"</a>. <i><a href="/wiki/BBC_News" title="BBC News">BBC News</a></i>. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20150129183607/http://www.bbc.co.uk/news/31047780">Archived</a> from the original on 29 January 2015<span class="reference-accessdate">. Retrieved <span class="nowrap">30 January</span> 2015</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=unknown&amp;rft.jtitle=BBC+News&amp;rft.atitle=Microsoft%27s+Bill+Gates+insists+AI+is+a+threat&amp;rft.date=2015-01-29&amp;rft.aulast=Rawlinson&amp;rft.aufirst=Kevin&amp;rft_id=https%3A%2F%2Fwww.bbc.co.uk%2Fnews%2F31047780&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-41"><span class="mw-cite-backlink"><b><a href="#cite_ref-41">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite class="citation web cs1"><a rel="nofollow" class="external text" href="http://futureoflife.org/ai-open-letter">"The Future of Life Institute Open Letter"</a>. The Future of Life Institute. 28 October 2015. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20190329094536/https://futureoflife.org/ai-open-letter/">Archived</a> from the original on 29 March 2019<span class="reference-accessdate">. 
Retrieved <span class="nowrap">29 March</span> 2019</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=The+Future+of+Life+Institute+Open+Letter&amp;rft.pub=The+Future+of+Life+Institute&amp;rft.date=2015-10-28&amp;rft_id=http%3A%2F%2Ffutureoflife.org%2Fai-open-letter&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-42"><span class="mw-cite-backlink"><b><a href="#cite_ref-42">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFBradshaw2015" class="citation web cs1">Bradshaw, Tim (11 January 2015). <a rel="nofollow" class="external text" href="https://www.ft.com/cms/s/0/3d2c2f12-99e9-11e4-93c1-00144feabdc0.html#axzz3TNL9lxJV">"Scientists and investors warn on AI"</a>. The Financial Times. <a rel="nofollow" class="external text" href="https://web.archive.org/web/20150207042806/http://www.ft.com/cms/s/0/3d2c2f12-99e9-11e4-93c1-00144feabdc0.html#axzz3TNL9lxJV">Archived</a> from the original on 7 February 2015<span class="reference-accessdate">. 
Retrieved <span class="nowrap">4 March</span> 2015</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=unknown&amp;rft.btitle=Scientists+and+investors+warn+on+AI&amp;rft.pub=The+Financial+Times&amp;rft.date=2015-01-11&amp;rft.aulast=Bradshaw&amp;rft.aufirst=Tim&amp;rft_id=http%3A%2F%2Fwww.ft.com%2Fcms%2Fs%2F0%2F3d2c2f12-99e9-11e4-93c1-00144feabdc0.html%23axzz3TNL9lxJV&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-43"><span class="mw-cite-backlink"><b><a href="#cite_ref-43">^</a></b></span> <span class="reference-text"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFKaminski2022" class="citation journal cs1">Kaminski, Johannes D. (December 2022). <a rel="nofollow" class="external text" href="https://link.springer.com/10.1007/s11059-022-00670-w">"On human expendability: AI takeover in Clarke's Odyssey and Stross's Accelerando"</a>. <i>Neohelicon</i>. <b>49</b> (2): 495–511. <a href="/wiki/Doi_(identifier)" class="mw-redirect" title="Doi (identifier)">doi</a>:<a rel="nofollow" class="external text" href="https://doi.org/10.1007%2Fs11059-022-00670-w">10.1007/s11059-022-00670-w</a>. <a href="/wiki/ISSN_(identifier)" class="mw-redirect" title="ISSN (identifier)">ISSN</a>&#160;<a rel="nofollow" class="external text" href="https://search.worldcat.org/issn/0324-4652">0324-4652</a>. 
<a href="/wiki/S2CID_(identifier)" class="mw-redirect" title="S2CID (identifier)">S2CID</a>&#160;<a rel="nofollow" class="external text" href="https://api.semanticscholar.org/CorpusID:253793613">253793613</a>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Ajournal&amp;rft.genre=article&amp;rft.jtitle=Neohelicon&amp;rft.atitle=On+human+expendability%3A+AI+takeover+in+Clarke%27s+Odyssey+and+Stross%27s+Accelerando&amp;rft.volume=49&amp;rft.issue=2&amp;rft.pages=495-511&amp;rft.date=2022-12&amp;rft_id=https%3A%2F%2Fapi.semanticscholar.org%2FCorpusID%3A253793613%23id-name%3DS2CID&amp;rft.issn=0324-4652&amp;rft_id=info%3Adoi%2F10.1007%2Fs11059-022-00670-w&amp;rft.aulast=Kaminski&amp;rft.aufirst=Johannes+D.&amp;rft_id=https%3A%2F%2Flink.springer.com%2F10.1007%2Fs11059-022-00670-w&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> <li id="cite_note-AI_alignment_aima4-44"><span class="mw-cite-backlink"><b><a href="#cite_ref-AI_alignment_aima4_44-0">^</a></b></span> <span class="reference-text"> <link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1238218222"><cite id="CITEREFRussellNorvig2021" class="citation book cs1">Russell, Stuart J.; Norvig, Peter (2021). <a rel="nofollow" class="external text" href="https://www.pearson.com/us/higher-education/program/Russell-Artificial-Intelligence-A-Modern-Approach-4th-Edition/PGM1263338.html"><i>Artificial intelligence: A modern approach</i></a> (4th&#160;ed.). Pearson. pp.&#160;5, 1003. <a href="/wiki/ISBN_(identifier)" class="mw-redirect" title="ISBN (identifier)">ISBN</a>&#160;<a href="/wiki/Special:BookSources/9780134610993" title="Special:BookSources/9780134610993"><bdi>9780134610993</bdi></a><span class="reference-accessdate">. 
Retrieved <span class="nowrap">September 12,</span> 2022</span>.</cite><span title="ctx_ver=Z39.88-2004&amp;rft_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Abook&amp;rft.genre=book&amp;rft.btitle=Artificial+intelligence%3A+A+modern+approach&amp;rft.pages=5%2C+1003&amp;rft.edition=4th&amp;rft.pub=Pearson&amp;rft.date=2021&amp;rft.isbn=9780134610993&amp;rft.aulast=Russell&amp;rft.aufirst=Stuart+J.&amp;rft.au=Norvig%2C+Peter&amp;rft_id=https%3A%2F%2Fwww.pearson.com%2Fus%2Fhigher-education%2Fprogram%2FRussell-Artificial-Intelligence-A-Modern-Approach-4th-Edition%2FPGM1263338.html&amp;rfr_id=info%3Asid%2Fen.wikipedia.org%3AAI+takeover" class="Z3988"></span></span> </li> </ol></div></div> <div class="mw-heading mw-heading2"><h2 id="External_links">External links</h2><span class="mw-editsection"><span class="mw-editsection-bracket">[</span><a href="/w/index.php?title=AI_takeover&amp;action=edit&amp;section=22" title="Edit section: External links"><span>edit</span></a><span class="mw-editsection-bracket">]</span></span></div> <ul><li><a href="/wiki/TED_talk" class="mw-redirect" title="TED talk">TED talk</a>: <a rel="nofollow" class="external text" href="https://www.youtube.com/watch?v=8nt3edWLgIg">"Can we build AI without losing control over it?"</a> by <a href="/wiki/Sam_Harris" title="Sam Harris">Sam Harris</a></li></ul> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><style data-mw-deduplicate="TemplateStyles:r1236075235">.mw-parser-output .navbox{box-sizing:border-box;border:1px solid #a2a9b1;width:100%;clear:both;font-size:88%;text-align:center;padding:1px;margin:1em auto 0}.mw-parser-output .navbox .navbox{margin-top:0}.mw-parser-output .navbox+.navbox,.mw-parser-output .navbox+.navbox-styles+.navbox{margin-top:-1px}.mw-parser-output .navbox-inner,.mw-parser-output .navbox-subgroup{width:100%}.mw-parser-output .navbox-group,.mw-parser-output .navbox-title,.mw-parser-output .navbox-abovebelow{padding:0.25em 
1em;line-height:1.5em;text-align:center}.mw-parser-output .navbox-group{white-space:nowrap;text-align:right}.mw-parser-output .navbox,.mw-parser-output .navbox-subgroup{background-color:#fdfdfd}.mw-parser-output .navbox-list{line-height:1.5em;border-color:#fdfdfd}.mw-parser-output .navbox-list-with-group{text-align:left;border-left-width:2px;border-left-style:solid}.mw-parser-output tr+tr>.navbox-abovebelow,.mw-parser-output tr+tr>.navbox-group,.mw-parser-output tr+tr>.navbox-image,.mw-parser-output tr+tr>.navbox-list{border-top:2px solid #fdfdfd}.mw-parser-output .navbox-title{background-color:#ccf}.mw-parser-output .navbox-abovebelow,.mw-parser-output .navbox-group,.mw-parser-output .navbox-subgroup .navbox-title{background-color:#ddf}.mw-parser-output .navbox-subgroup .navbox-group,.mw-parser-output .navbox-subgroup .navbox-abovebelow{background-color:#e6e6ff}.mw-parser-output .navbox-even{background-color:#f7f7f7}.mw-parser-output .navbox-odd{background-color:transparent}.mw-parser-output .navbox .hlist td dl,.mw-parser-output .navbox .hlist td ol,.mw-parser-output .navbox .hlist td ul,.mw-parser-output .navbox td.hlist dl,.mw-parser-output .navbox td.hlist ol,.mw-parser-output .navbox td.hlist ul{padding:0.125em 0}.mw-parser-output .navbox .navbar{display:block;font-size:100%}.mw-parser-output .navbox-title .navbar{float:left;text-align:left;margin-right:0.5em}body.skin--responsive .mw-parser-output .navbox-image img{max-width:none!important}@media print{body.ns-0 .mw-parser-output .navbox{display:none!important}}</style></div><div role="navigation" class="navbox" aria-labelledby="Existential_risk_from_artificial_intelligence" style="padding:3px"><table class="nowraplinks mw-collapsible expanded navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><link rel="mw-deduplicated-inline-style" 
href="mw-data:TemplateStyles:r1239400231"><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Existential_risk_from_artificial_intelligence" title="Template:Existential risk from artificial intelligence"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Existential_risk_from_artificial_intelligence" title="Template talk:Existential risk from artificial intelligence"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Existential_risk_from_artificial_intelligence" title="Special:EditPage/Template:Existential risk from artificial intelligence"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Existential_risk_from_artificial_intelligence" style="font-size:114%;margin:0 4em"><a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk</a> from <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">artificial intelligence</a></div></th></tr><tr><th scope="row" class="navbox-group" style="width:1%">Concepts</th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Artificial_general_intelligence" title="Artificial general intelligence">AGI</a></li> <li><a href="/wiki/AI_alignment" title="AI alignment">AI alignment</a></li> <li><a href="/wiki/AI_capability_control" title="AI capability control">AI capability control</a></li> <li><a href="/wiki/AI_safety" title="AI safety">AI safety</a></li> <li><a class="mw-selflink selflink">AI takeover</a></li> <li><a href="/wiki/Consequentialism" title="Consequentialism">Consequentialism</a></li> <li><a href="/wiki/Effective_accelerationism" title="Effective accelerationism">Effective accelerationism</a></li> <li><a 
href="/wiki/Ethics_of_artificial_intelligence" title="Ethics of artificial intelligence">Ethics of artificial intelligence</a></li> <li><a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk from artificial general intelligence</a></li> <li><a href="/wiki/Friendly_artificial_intelligence" title="Friendly artificial intelligence">Friendly artificial intelligence</a></li> <li><a href="/wiki/Instrumental_convergence" title="Instrumental convergence">Instrumental convergence</a></li> <li><a href="/wiki/Intelligence_explosion" class="mw-redirect" title="Intelligence explosion">Intelligence explosion</a></li> <li><a href="/wiki/Longtermism" title="Longtermism">Longtermism</a></li> <li><a href="/wiki/Machine_ethics" title="Machine ethics">Machine ethics</a></li> <li><a href="/wiki/Suffering_risks" class="mw-redirect" title="Suffering risks">Suffering risks</a></li> <li><a href="/wiki/Superintelligence" title="Superintelligence">Superintelligence</a></li> <li><a href="/wiki/Technological_singularity" title="Technological singularity">Technological singularity</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Organizations</th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Alignment_Research_Center" title="Alignment Research Center">Alignment Research Center</a></li> <li><a href="/wiki/Center_for_AI_Safety" title="Center for AI Safety">Center for AI Safety</a></li> <li><a href="/wiki/Center_for_Applied_Rationality" title="Center for Applied Rationality">Center for Applied Rationality</a></li> <li><a href="/wiki/Center_for_Human-Compatible_Artificial_Intelligence" title="Center for Human-Compatible Artificial Intelligence">Center for Human-Compatible Artificial Intelligence</a></li> <li><a 
href="/wiki/Centre_for_the_Study_of_Existential_Risk" title="Centre for the Study of Existential Risk">Centre for the Study of Existential Risk</a></li> <li><a href="/wiki/EleutherAI" title="EleutherAI">EleutherAI</a></li> <li><a href="/wiki/Future_of_Humanity_Institute" title="Future of Humanity Institute">Future of Humanity Institute</a></li> <li><a href="/wiki/Future_of_Life_Institute" title="Future of Life Institute">Future of Life Institute</a></li> <li><a href="/wiki/Google_DeepMind" title="Google DeepMind">Google DeepMind</a></li> <li><a href="/wiki/Humanity%2B" title="Humanity+">Humanity+</a></li> <li><a href="/wiki/Institute_for_Ethics_and_Emerging_Technologies" title="Institute for Ethics and Emerging Technologies">Institute for Ethics and Emerging Technologies</a></li> <li><a href="/wiki/Leverhulme_Centre_for_the_Future_of_Intelligence" title="Leverhulme Centre for the Future of Intelligence">Leverhulme Centre for the Future of Intelligence</a></li> <li><a href="/wiki/Machine_Intelligence_Research_Institute" title="Machine Intelligence Research Institute">Machine Intelligence Research Institute</a></li> <li><a href="/wiki/OpenAI" title="OpenAI">OpenAI</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">People</th><td class="navbox-list-with-group navbox-list navbox-odd hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Slate_Star_Codex" title="Slate Star Codex">Scott Alexander</a></li> <li><a href="/wiki/Sam_Altman" title="Sam Altman">Sam Altman</a></li> <li><a href="/wiki/Yoshua_Bengio" title="Yoshua Bengio">Yoshua Bengio</a></li> <li><a href="/wiki/Nick_Bostrom" title="Nick Bostrom">Nick Bostrom</a></li> <li><a href="/wiki/Paul_Christiano_(researcher)" title="Paul Christiano (researcher)">Paul Christiano</a></li> <li><a href="/wiki/K._Eric_Drexler" title="K. 
Eric Drexler">Eric Drexler</a></li> <li><a href="/wiki/Sam_Harris" title="Sam Harris">Sam Harris</a></li> <li><a href="/wiki/Stephen_Hawking" title="Stephen Hawking">Stephen Hawking</a></li> <li><a href="/wiki/Dan_Hendrycks" title="Dan Hendrycks">Dan Hendrycks</a></li> <li><a href="/wiki/Geoffrey_Hinton" title="Geoffrey Hinton">Geoffrey Hinton</a></li> <li><a href="/wiki/Bill_Joy" title="Bill Joy">Bill Joy</a></li> <li><a href="/wiki/Shane_Legg" title="Shane Legg">Shane Legg</a></li> <li><a href="/wiki/Elon_Musk" title="Elon Musk">Elon Musk</a></li> <li><a href="/wiki/Steve_Omohundro" title="Steve Omohundro">Steve Omohundro</a></li> <li><a href="/wiki/Huw_Price" title="Huw Price">Huw Price</a></li> <li><a href="/wiki/Martin_Rees" title="Martin Rees">Martin Rees</a></li> <li><a href="/wiki/Stuart_J._Russell" title="Stuart J. Russell">Stuart J. Russell</a></li> <li><a href="/wiki/Jaan_Tallinn" title="Jaan Tallinn">Jaan Tallinn</a></li> <li><a href="/wiki/Max_Tegmark" title="Max Tegmark">Max Tegmark</a></li> <li><a href="/wiki/Frank_Wilczek" title="Frank Wilczek">Frank Wilczek</a></li> <li><a href="/wiki/Roman_Yampolskiy" title="Roman Yampolskiy">Roman Yampolskiy</a></li> <li><a href="/wiki/Eliezer_Yudkowsky" title="Eliezer Yudkowsky">Eliezer Yudkowsky</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Other</th><td class="navbox-list-with-group navbox-list navbox-even hlist" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Statement_on_AI_risk_of_extinction" title="Statement on AI risk of extinction">Statement on AI risk of extinction</a></li> <li><i><a href="/wiki/Human_Compatible" title="Human Compatible">Human Compatible</a></i></li> <li><a href="/wiki/Open_letter_on_artificial_intelligence_(2015)" title="Open letter on artificial intelligence (2015)">Open letter on artificial intelligence (2015)</a></li> <li><i><a href="/wiki/Our_Final_Invention" title="Our Final Invention">Our Final 
Invention</a></i></li> <li><i><a href="/wiki/The_Precipice:_Existential_Risk_and_the_Future_of_Humanity" title="The Precipice: Existential Risk and the Future of Humanity">The Precipice</a></i></li> <li><i><a href="/wiki/Superintelligence:_Paths,_Dangers,_Strategies" title="Superintelligence: Paths, Dangers, Strategies">Superintelligence: Paths, Dangers, Strategies</a></i></li> <li><i><a href="/wiki/Do_You_Trust_This_Computer%3F" title="Do You Trust This Computer?">Do You Trust This Computer?</a></i></li> <li><a href="/wiki/Artificial_Intelligence_Act" title="Artificial Intelligence Act">Artificial Intelligence Act</a></li></ul> </div></td></tr><tr><td class="navbox-abovebelow" colspan="2"><div><span class="noviewer" typeof="mw:File"><span title="Category"><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/16px-Symbol_category_class.svg.png" decoding="async" width="16" height="16" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/23px-Symbol_category_class.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/31px-Symbol_category_class.svg.png 2x" data-file-width="180" data-file-height="185" /></span></span> <a href="/wiki/Category:Existential_risk_from_artificial_general_intelligence" title="Category:Existential risk from artificial general intelligence">Category</a></div></td></tr></tbody></table></div> <div class="navbox-styles"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1129693374"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1236075235"></div><div role="navigation" class="navbox" aria-labelledby="Global_catastrophic_risks" style="padding:3px"><table class="nowraplinks hlist mw-collapsible autocollapse navbox-inner" style="border-spacing:0;background:transparent;color:inherit"><tbody><tr><th scope="col" class="navbox-title" colspan="2"><link rel="mw-deduplicated-inline-style" 
href="mw-data:TemplateStyles:r1129693374"><link rel="mw-deduplicated-inline-style" href="mw-data:TemplateStyles:r1239400231"><div class="navbar plainlinks hlist navbar-mini"><ul><li class="nv-view"><a href="/wiki/Template:Global_catastrophic_risks" title="Template:Global catastrophic risks"><abbr title="View this template">v</abbr></a></li><li class="nv-talk"><a href="/wiki/Template_talk:Global_catastrophic_risks" title="Template talk:Global catastrophic risks"><abbr title="Discuss this template">t</abbr></a></li><li class="nv-edit"><a href="/wiki/Special:EditPage/Template:Global_catastrophic_risks" title="Special:EditPage/Template:Global catastrophic risks"><abbr title="Edit this template">e</abbr></a></li></ul></div><div id="Global_catastrophic_risks" style="font-size:114%;margin:0 4em"><a href="/wiki/Global_catastrophic_risk" title="Global catastrophic risk">Global catastrophic risks</a></div></th></tr><tr><td class="navbox-abovebelow" colspan="2"><div> <ul><li><a href="/wiki/Future_of_Earth" title="Future of Earth">Future of the Earth</a></li> <li><a href="/wiki/Future_of_an_expanding_universe" title="Future of an expanding universe">Future of an expanding universe</a> <ul><li><a href="/wiki/Ultimate_fate_of_the_universe" title="Ultimate fate of the universe">Ultimate fate of the universe</a></li></ul></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Technological</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Chemical_warfare" title="Chemical warfare">Chemical warfare</a></li> <li><a href="/wiki/Cyberattack" title="Cyberattack">Cyberattack</a> <ul><li><a href="/wiki/Cyberwarfare" title="Cyberwarfare">Cyberwarfare</a></li> <li><a href="/wiki/Cyberterrorism" title="Cyberterrorism">Cyberterrorism</a></li> <li><a href="/wiki/Cybergeddon" title="Cybergeddon">Cybergeddon</a></li></ul></li> <li><a href="/wiki/Gray_goo" title="Gray 
goo">Gray goo</a></li> <li><a href="/wiki/Industrial_applications_of_nanotechnology#Weapons" title="Industrial applications of nanotechnology">Nanoweapons</a></li> <li><a href="/wiki/Kinetic_bombardment" title="Kinetic bombardment">Kinetic bombardment</a> <ul><li><a href="/wiki/Kinetic_energy_weapon" title="Kinetic energy weapon">Kinetic energy weapon</a></li></ul></li> <li><a href="/wiki/Nuclear_warfare" title="Nuclear warfare">Nuclear warfare</a> <ul><li><a href="/wiki/Mutual_assured_destruction" title="Mutual assured destruction">Mutual assured destruction</a></li> <li><a href="/wiki/Dead_Hand" title="Dead Hand">Dead Hand</a></li> <li><a href="/wiki/Doomsday_Clock" title="Doomsday Clock">Doomsday Clock</a></li> <li><a href="/wiki/Doomsday_device" title="Doomsday device">Doomsday device</a></li> <li><a href="/wiki/Antimatter_weapon" title="Antimatter weapon">Antimatter weapon</a></li></ul></li> <li><a href="/wiki/Electromagnetic_pulse" title="Electromagnetic pulse">Electromagnetic pulse</a> (EMP)</li> <li><a href="/wiki/Safety_of_high-energy_particle_collision_experiments" title="Safety of high-energy particle collision experiments">Safety of high-energy particle collision experiments</a> <ul><li><a href="/wiki/Micro_black_hole" title="Micro black hole">Micro black hole</a></li> <li><a href="/wiki/Strangelet" title="Strangelet">Strangelet</a></li></ul></li> <li><a href="/wiki/Synthetic_intelligence" title="Synthetic intelligence">Synthetic intelligence</a> / <a href="/wiki/Artificial_intelligence" title="Artificial intelligence">Artificial intelligence</a> <ul><li><a class="mw-selflink selflink">AI takeover</a></li> <li><a href="/wiki/Existential_risk_from_artificial_general_intelligence" class="mw-redirect" title="Existential risk from artificial general intelligence">Existential risk from artificial intelligence</a></li> <li><a href="/wiki/Technological_singularity" title="Technological singularity">Technological singularity</a></li></ul></li> <li><a 
href="/wiki/Transhumanism" title="Transhumanism">Transhumanism</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Sociological</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Anthropogenic_hazard" class="mw-redirect" title="Anthropogenic hazard">Anthropogenic hazard</a></li> <li><a href="/wiki/Collapsology" title="Collapsology">Collapsology</a></li> <li><a href="/wiki/Doomsday_argument" title="Doomsday argument">Doomsday argument</a> <ul><li><a href="/wiki/Self-indication_assumption_doomsday_argument_rebuttal" title="Self-indication assumption doomsday argument rebuttal">Self-indication assumption doomsday argument rebuttal</a></li> <li><a href="/wiki/Self-referencing_doomsday_argument_rebuttal" title="Self-referencing doomsday argument rebuttal">Self-referencing doomsday argument rebuttal</a></li></ul></li> <li><a href="/wiki/Economic_collapse" title="Economic collapse">Economic collapse</a></li> <li><a href="/wiki/Malthusianism" title="Malthusianism">Malthusian catastrophe</a></li> <li><a href="/wiki/New_World_Order_(conspiracy_theory)" class="mw-redirect" title="New World Order (conspiracy theory)">New World Order (conspiracy theory)</a></li> <li><a href="/wiki/Nuclear_holocaust" title="Nuclear holocaust">Nuclear holocaust</a> <ul><li><a href="/wiki/Cobalt_bomb" title="Cobalt bomb">cobalt</a></li> <li><a href="/wiki/Nuclear_famine" title="Nuclear famine">famine</a></li> <li><a href="/wiki/Nuclear_winter" title="Nuclear winter">winter</a></li></ul></li> <li><a href="/wiki/Societal_collapse" title="Societal collapse">Societal collapse</a></li> <li><a href="/wiki/State_collapse" title="State collapse">State collapse</a></li> <li><a href="/wiki/World_War_III" title="World War III">World War III</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Climate_apocalypse" title="Climate 
apocalypse">Ecological</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Climate_variability_and_change" title="Climate variability and change">Climate change</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Anoxic_event" title="Anoxic event">Anoxic event</a></li> <li><a href="/wiki/Biodiversity_loss" title="Biodiversity loss">Biodiversity loss</a> <ul><li><a href="/wiki/Mass_mortality_event" title="Mass mortality event">Mass mortality event</a></li></ul></li> <li><a href="/wiki/Cascade_effect_(ecology)" title="Cascade effect (ecology)">Cascade effect</a></li> <li><a href="/wiki/Cataclysmic_pole_shift_hypothesis" title="Cataclysmic pole shift hypothesis">Cataclysmic pole shift hypothesis</a></li> <li><a href="/wiki/Climate_change_and_civilizational_collapse" title="Climate change and civilizational collapse">Climate change and civilizational collapse</a></li> <li><a href="/wiki/Deforestation" title="Deforestation">Deforestation</a></li> <li><a href="/wiki/Desertification" title="Desertification">Desertification</a></li> <li><a href="/wiki/Extinction_risk_from_climate_change" title="Extinction risk from climate change">Extinction risk from climate change</a> <ul><li><a href="/wiki/Tipping_points_in_the_climate_system" title="Tipping points in the climate system">Tipping points in the climate system</a></li></ul></li> <li><a href="/wiki/Flood_basalt" title="Flood basalt">Flood basalt</a></li> <li><a href="/wiki/Global_dimming" title="Global dimming">Global dimming</a></li> <li><a href="/wiki/Global_terrestrial_stilling" title="Global terrestrial stilling">Global terrestrial stilling</a></li> <li><a href="/wiki/Climate_change" title="Climate 
change">Global warming</a></li> <li><a href="/wiki/Hypercane" title="Hypercane">Hypercane</a></li> <li><a href="/wiki/Ice_age" title="Ice age">Ice age</a></li> <li><a href="/wiki/Ecocide" title="Ecocide">Ecocide</a></li> <li><a href="/wiki/Ecological_collapse" class="mw-redirect" title="Ecological collapse">Ecological collapse</a></li> <li><a href="/wiki/Environmental_degradation" title="Environmental degradation">Environmental degradation</a></li> <li><a href="/wiki/Habitat_destruction" title="Habitat destruction">Habitat destruction</a></li> <li><a href="/wiki/Human_impact_on_the_environment" title="Human impact on the environment">Human impact on the environment</a> <ul><li><a href="/wiki/Environmental_issues_with_coral_reefs" title="Environmental issues with coral reefs">coral reefs</a></li> <li><a href="/wiki/Human_impact_on_marine_life" title="Human impact on marine life">on marine life</a></li></ul></li> <li><a href="/wiki/Land_degradation" title="Land degradation">Land degradation</a></li> <li><a href="/wiki/Land_consumption" title="Land consumption">Land consumption</a></li> <li><a href="/wiki/Land_surface_effects_on_climate" title="Land surface effects on climate">Land surface effects on climate</a></li> <li><a href="/wiki/Ocean_acidification" title="Ocean acidification">Ocean acidification</a></li> <li><a href="/wiki/Ozone_depletion" title="Ozone depletion">Ozone depletion</a></li> <li><a href="/wiki/Resource_depletion" title="Resource depletion">Resource depletion</a></li> <li><a href="/wiki/Sea_level_rise" title="Sea level rise">Sea level rise</a></li> <li><a href="/wiki/Supervolcano" title="Supervolcano">Supervolcano</a> <ul><li><a href="/wiki/Volcanic_winter" title="Volcanic winter">winter</a></li></ul></li> <li><a href="/wiki/Verneshot" title="Verneshot">Verneshot</a></li> <li><a href="/wiki/Water_pollution" title="Water pollution">Water pollution</a></li> <li><a href="/wiki/Water_scarcity" title="Water scarcity">Water scarcity</a></li></ul> 
</div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Earth_Overshoot_Day" title="Earth Overshoot Day">Earth Overshoot Day</a></th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Overexploitation" title="Overexploitation">Overexploitation</a></li> <li><a href="/wiki/Overpopulation" title="Overpopulation">Overpopulation</a> <ul><li><a href="/wiki/Human_overpopulation" title="Human overpopulation">Human overpopulation</a></li></ul></li></ul> </div></td></tr></tbody></table><div></div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Biological</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"></div><table class="nowraplinks navbox-subgroup" style="border-spacing:0"><tbody><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Extinction" title="Extinction">Extinction</a></th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Extinction_event" title="Extinction event">Extinction event</a></li> <li><a href="/wiki/Holocene_extinction" title="Holocene extinction">Holocene extinction</a></li> <li><a href="/wiki/Human_extinction" title="Human extinction">Human extinction</a></li> <li><a href="/wiki/List_of_extinction_events" title="List of extinction events">List of extinction events</a></li> <li><a href="/wiki/Genetic_erosion" title="Genetic erosion">Genetic erosion</a></li> <li><a href="/wiki/Genetic_pollution" title="Genetic pollution">Genetic pollution</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Others</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Biodiversity_loss" title="Biodiversity loss">Biodiversity 
loss</a> <ul><li><a href="/wiki/Decline_in_amphibian_populations" title="Decline in amphibian populations">Decline in amphibian populations</a></li> <li><a href="/wiki/Decline_in_insect_populations" title="Decline in insect populations">Decline in insect populations</a></li></ul></li> <li><a href="/wiki/Biotechnology_risk" title="Biotechnology risk">Biotechnology risk</a> <ul><li><a href="/wiki/Biological_agent" title="Biological agent">Biological agent</a></li> <li><a href="/wiki/Biological_warfare" title="Biological warfare">Biological warfare</a></li> <li><a href="/wiki/Bioterrorism" title="Bioterrorism">Bioterrorism</a></li></ul></li> <li><a href="/wiki/Colony_collapse_disorder" title="Colony collapse disorder">Colony collapse disorder</a></li> <li><a href="/wiki/Defaunation" title="Defaunation">Defaunation</a></li> <li><a href="/wiki/Dysgenics" title="Dysgenics">Dysgenics</a></li> <li><a href="/wiki/Interplanetary_contamination" title="Interplanetary contamination">Interplanetary contamination</a></li> <li><a href="/wiki/Pandemic" title="Pandemic">Pandemic</a></li> <li><a href="/wiki/Pollinator_decline" title="Pollinator decline">Pollinator decline</a></li> <li><a href="/wiki/Overfishing" title="Overfishing">Overfishing</a></li></ul> </div></td></tr></tbody></table><div></div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Astronomical</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Big_Crunch" title="Big Crunch">Big Crunch</a></li> <li><a href="/wiki/Big_Rip" title="Big Rip">Big Rip</a></li> <li><a href="/wiki/Coronal_mass_ejection" title="Coronal mass ejection">Coronal mass ejection</a></li> <li><a href="/wiki/Cosmological_phase_transition" title="Cosmological phase transition">Cosmological phase transition</a></li> <li><a href="/wiki/Geomagnetic_storm" title="Geomagnetic storm">Geomagnetic storm</a></li> <li><a href="/wiki/False_vacuum_decay" 
class="mw-redirect" title="False vacuum decay">False vacuum decay</a></li> <li><a href="/wiki/Gamma-ray_burst" title="Gamma-ray burst">Gamma-ray burst</a></li> <li><a href="/wiki/Heat_death_of_the_universe" title="Heat death of the universe">Heat death of the universe</a></li> <li><a href="/wiki/Proton_decay" title="Proton decay">Proton decay</a></li> <li><a href="/wiki/Virtual_black_hole" title="Virtual black hole">Virtual black hole</a></li> <li><a href="/wiki/Impact_event" title="Impact event">Impact event</a> <ul><li><a href="/wiki/Asteroid_impact_avoidance" title="Asteroid impact avoidance">Asteroid impact avoidance</a></li> <li><a href="/wiki/Asteroid_impact_prediction" title="Asteroid impact prediction">Asteroid impact prediction</a></li> <li><a href="/wiki/Potentially_hazardous_object" title="Potentially hazardous object">Potentially hazardous object</a> <ul><li><a href="/wiki/Near-Earth_object" title="Near-Earth object">Near-Earth object</a></li></ul></li> <li><a href="/wiki/Impact_winter" title="Impact winter">winter</a></li> <li><a href="/wiki/Rogue_planet" title="Rogue planet">Rogue planet</a></li></ul></li> <li><a href="/wiki/Near-Earth_supernova" title="Near-Earth supernova">Near-Earth supernova</a></li> <li><a href="/wiki/Hypernova" title="Hypernova">Hypernova</a></li> <li><a href="/wiki/Micronova" title="Micronova">Micronova</a></li> <li><a href="/wiki/Solar_flare" title="Solar flare">Solar flare</a></li> <li><a href="/wiki/Stellar_collision" title="Stellar collision">Stellar collision</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%"><a href="/wiki/Eschatology" title="Eschatology">Eschatological</a></th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Buddhist_eschatology" title="Buddhist eschatology">Buddhist</a> <ul><li><a href="/wiki/Maitreya" title="Maitreya">Maitreya</a></li> <li><a 
href="/wiki/Three_Ages_of_Buddhism" title="Three Ages of Buddhism">Three Ages</a></li></ul></li> <li><a href="/wiki/Hindu_eschatology" title="Hindu eschatology">Hindu</a> <ul><li><a href="/wiki/Kalki" title="Kalki">Kalki</a></li> <li><a href="/wiki/Kali_Yuga" title="Kali Yuga">Kali Yuga</a></li></ul></li> <li><a href="/wiki/Last_Judgment" title="Last Judgment">Last Judgement</a></li> <li><a href="/wiki/Second_Coming" title="Second Coming">Second Coming</a> <ul><li><a href="/wiki/Book_of_Enoch" title="Book of Enoch">1 Enoch</a></li> <li><a href="/wiki/Book_of_Daniel" title="Book of Daniel">Daniel</a> <ul><li><a href="/wiki/Abomination_of_desolation" title="Abomination of desolation">Abomination of desolation</a></li> <li><a href="/wiki/Prophecy_of_Seventy_Weeks" title="Prophecy of Seventy Weeks">Prophecy of Seventy Weeks</a></li></ul></li> <li><a href="/wiki/Messiah" title="Messiah">Messiah</a></li> <li><a href="/wiki/Christian_eschatology" title="Christian eschatology">Christian</a> <ul><li><a href="/wiki/Futurism_(Christianity)" title="Futurism (Christianity)">Futurism</a></li> <li><a href="/wiki/Historicism_(Christianity)" title="Historicism (Christianity)">Historicism</a> <ul><li><a href="/wiki/Historicist_interpretations_of_the_Book_of_Revelation" title="Historicist interpretations of the Book of Revelation">Interpretations of Revelation</a></li></ul></li> <li><a href="/wiki/Idealism_(Christian_eschatology)" title="Idealism (Christian eschatology)"> Idealism</a></li> <li><a href="/wiki/Preterism" title="Preterism">Preterism</a></li> <li><a href="/wiki/2_Esdras" title="2 Esdras">2 Esdras</a></li> <li><a href="/wiki/Second_Epistle_to_the_Thessalonians" title="Second Epistle to the Thessalonians">2 Thessalonians</a> <ul><li><a href="/wiki/Man_of_sin" title="Man of sin">Man of sin</a></li> <li><a href="/wiki/Katechon" title="Katechon">Katechon</a></li></ul></li> <li><a href="/wiki/Antichrist" title="Antichrist">Antichrist</a></li> <li><a 
href="/wiki/Book_of_Revelation" title="Book of Revelation">Book of Revelation</a> <ul><li><a href="/wiki/Events_of_Revelation" title="Events of Revelation">Events</a> <ul><li><a href="/wiki/Four_Horsemen_of_the_Apocalypse" title="Four Horsemen of the Apocalypse">Four Horsemen of the Apocalypse</a></li> <li><a href="/wiki/Lake_of_fire" title="Lake of fire">Lake of fire</a></li> <li><a href="/wiki/Number_of_the_beast" title="Number of the beast">Number of the Beast</a></li> <li><a href="/wiki/Seven_bowls" title="Seven bowls">Seven bowls</a></li> <li><a href="/wiki/Seven_seals" title="Seven seals">Seven seals</a></li> <li><a href="/wiki/The_Beast_(Revelation)" title="The Beast (Revelation)">The Beast</a></li> <li><a href="/wiki/Two_witnesses" title="Two witnesses">Two witnesses</a></li> <li><a href="/wiki/War_in_Heaven" title="War in Heaven">War in Heaven</a></li> <li><a href="/wiki/Whore_of_Babylon" title="Whore of Babylon">Whore of Babylon</a></li></ul></li></ul></li> <li><a href="/wiki/Great_Apostasy" title="Great Apostasy">Great Apostasy</a></li> <li><a href="/wiki/New_Earth_(Christianity)" title="New Earth (Christianity)">New Earth</a></li> <li><a href="/wiki/New_Jerusalem" title="New Jerusalem">New Jerusalem</a></li> <li><a href="/wiki/Olivet_Discourse" title="Olivet Discourse">Olivet Discourse</a> <ul><li><a href="/wiki/Great_Tribulation" title="Great Tribulation">Great Tribulation</a></li></ul></li> <li><a href="/wiki/Son_of_perdition" title="Son of perdition">Son of perdition</a></li> <li><a href="/wiki/The_Sheep_and_the_Goats" title="The Sheep and the Goats">Sheep and Goats</a></li></ul></li> <li><a href="/wiki/Islamic_eschatology" title="Islamic eschatology">Islamic</a> <ul><li><a href="/wiki/Qa%27im_Al_Muhammad" title="Qa&#39;im Al Muhammad">Al-Qa'im</a></li> <li><a href="/wiki/Beast_of_the_Earth" title="Beast of the Earth">Beast of the Earth</a></li> <li><a href="/wiki/Dhu_al-Qarnayn" title="Dhu al-Qarnayn">Dhu al-Qarnayn</a></li> <li><a 
href="/wiki/Dhul-Suwayqatayn" title="Dhul-Suwayqatayn">Dhul-Suwayqatayn</a></li> <li><a href="/wiki/Al-Masih_ad-Dajjal" title="Al-Masih ad-Dajjal">Dajjal</a></li> <li><a href="/wiki/Israfil" title="Israfil">Israfil</a></li> <li><a href="/wiki/Mahdi" title="Mahdi">Mahdi</a></li> <li><a href="/wiki/Sufyani" title="Sufyani">Sufyani</a></li></ul></li> <li><a href="/wiki/Jewish_eschatology" title="Jewish eschatology">Jewish</a> <ul><li><a href="/wiki/Messiah_in_Judaism" title="Messiah in Judaism">Messiah</a></li> <li><a href="/wiki/Gog_and_Magog" title="Gog and Magog">War of Gog and Magog</a></li> <li><a href="/wiki/Third_Temple" title="Third Temple">Third Temple</a></li></ul></li></ul></li> <li><a href="/wiki/Ragnar%C3%B6k" title="Ragnarök">Norse</a></li> <li><a href="/wiki/Frashokereti" title="Frashokereti">Zoroastrian</a> <ul><li><a href="/wiki/Saoshyant" title="Saoshyant">Saoshyant</a></li></ul></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Others</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/2011_end_times_prediction" title="2011 end times prediction">2011 end times prediction</a></li> <li><a href="/wiki/2012_phenomenon" title="2012 phenomenon">2012 phenomenon</a></li> <li><a href="/wiki/Apocalypse" title="Apocalypse">Apocalypse</a></li> <li><a href="/wiki/Apocalyptic_literature" title="Apocalyptic literature">Apocalyptic literature</a></li> <li><a href="/wiki/Apocalypticism" title="Apocalypticism">Apocalypticism</a></li> <li><a href="/wiki/Armageddon" title="Armageddon">Armageddon</a></li> <li><a href="/wiki/Blood_moon_prophecy" title="Blood moon prophecy">Blood moon prophecy</a></li> <li><a href="/wiki/Earth_Changes" title="Earth Changes">Earth Changes</a></li> <li><a href="/wiki/Global_catastrophic_risk" title="Global catastrophic risk">End time</a></li> <li><a href="/wiki/Gog_and_Magog" title="Gog and Magog">Gog and 
Magog</a></li> <li><a href="/wiki/List_of_dates_predicted_for_apocalyptic_events" title="List of dates predicted for apocalyptic events">List of dates predicted for apocalyptic events</a></li> <li><a href="/wiki/Messianism" title="Messianism">Messianism</a> <ul><li><a href="/wiki/Messianic_Age" title="Messianic Age">Messianic Age</a></li></ul></li> <li><a href="/wiki/Millenarianism" title="Millenarianism">Millenarianism</a></li> <li><a href="/wiki/Millennialism" title="Millennialism">Millennialism</a> <ul><li><a href="/wiki/Premillennialism" title="Premillennialism">Premillennialism</a></li> <li><a href="/wiki/Amillennialism" title="Amillennialism">Amillennialism</a></li> <li><a href="/wiki/Postmillennialism" title="Postmillennialism">Postmillennialism</a></li></ul></li> <li><a href="/wiki/Nemesis_(hypothetical_star)" title="Nemesis (hypothetical star)">Nemesis (hypothetical star)</a></li> <li><a href="/wiki/Nibiru_cataclysm" title="Nibiru cataclysm">Nibiru cataclysm</a></li> <li><a href="/wiki/Rapture" title="Rapture">Rapture</a> <ul><li><a href="/wiki/Rapture#Prewrath_premillennialism" title="Rapture">Prewrath</a></li> <li><a href="/wiki/Post-tribulation_rapture" title="Post-tribulation rapture">Post-tribulation rapture</a></li></ul></li> <li><a href="/wiki/Universal_resurrection" title="Universal resurrection">Resurrection of the dead</a></li> <li><a href="/wiki/World_to_come" title="World to come">World to come</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Fictional</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Alien_invasion" title="Alien invasion">Alien invasion</a></li> <li><a href="/wiki/Apocalyptic_and_post-apocalyptic_fiction" title="Apocalyptic and post-apocalyptic fiction">Apocalyptic and post-apocalyptic fiction</a> <ul><li><a href="/wiki/List_of_apocalyptic_and_post-apocalyptic_fiction" title="List of 
apocalyptic and post-apocalyptic fiction">List of apocalyptic and post-apocalyptic fiction</a></li> <li><a href="/wiki/List_of_apocalyptic_films" title="List of apocalyptic films">List of apocalyptic films</a></li></ul></li> <li><a href="/wiki/Climate_fiction" title="Climate fiction">Climate fiction</a></li> <li><a href="/wiki/Disaster_film" title="Disaster film">Disaster films</a> <ul><li><a href="/wiki/List_of_disaster_films" title="List of disaster films">List of disaster films</a></li></ul></li> <li><a href="/wiki/List_of_fictional_doomsday_devices" title="List of fictional doomsday devices">List of fictional doomsday devices</a></li> <li><a href="/wiki/Zombie_apocalypse" title="Zombie apocalypse">Zombie apocalypse</a> <ul><li><a href="/wiki/Zombie" title="Zombie">Zombie</a></li></ul></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">Organizations</th><td class="navbox-list-with-group navbox-list navbox-odd" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Centre_for_the_Study_of_Existential_Risk" title="Centre for the Study of Existential Risk">Centre for the Study of Existential Risk</a></li> <li><a href="/wiki/Future_of_Humanity_Institute" title="Future of Humanity Institute">Future of Humanity Institute</a></li> <li><a href="/wiki/Future_of_Life_Institute" title="Future of Life Institute">Future of Life Institute</a></li> <li><a href="/wiki/Nuclear_Threat_Initiative" title="Nuclear Threat Initiative">Nuclear Threat Initiative</a></li></ul> </div></td></tr><tr><th scope="row" class="navbox-group" style="width:1%">General</th><td class="navbox-list-with-group navbox-list navbox-even" style="width:100%;padding:0"><div style="padding:0 0.25em"> <ul><li><a href="/wiki/Ransomware" title="Ransomware">Ransomware</a></li> <li><a href="/wiki/Cyberwarfare" title="Cyberwarfare">Cyberwarfare</a></li> <li><a href="/wiki/Disaster" title="Disaster">Disaster</a></li> <li><a href="/wiki/Economic_depression" 
title="Economic depression">Depression</a></li> <li><a href="/wiki/Financial_crisis" title="Financial crisis">Financial crisis</a></li> <li><a href="/wiki/Pandemic" title="Pandemic">Pandemic</a></li> <li><a href="/wiki/Riot" title="Riot">Riots</a></li> <li><a href="/wiki/Social_crisis" title="Social crisis">Social crisis</a></li> <li><a href="/wiki/Survivalism" title="Survivalism">Survivalism</a></li></ul> </div></td></tr><tr><td class="navbox-abovebelow" colspan="2"><div> <ul><li><b><span class="nowrap"><span class="noviewer" typeof="mw:File"><a href="/wiki/File:The_Earth_seen_from_Apollo_17.jpg" class="mw-file-description"><img alt="" src="//upload.wikimedia.org/wikipedia/commons/thumb/9/97/The_Earth_seen_from_Apollo_17.jpg/16px-The_Earth_seen_from_Apollo_17.jpg" decoding="async" width="16" height="16" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/commons/thumb/9/97/The_Earth_seen_from_Apollo_17.jpg/24px-The_Earth_seen_from_Apollo_17.jpg 1.5x, //upload.wikimedia.org/wikipedia/commons/thumb/9/97/The_Earth_seen_from_Apollo_17.jpg/32px-The_Earth_seen_from_Apollo_17.jpg 2x" data-file-width="3000" data-file-height="3002" /></a></span> </span><a href="/wiki/Portal:World" title="Portal:World">World&#32;portal</a></b></li> <li><span class="noviewer" typeof="mw:File"><span title="Category"><img alt="" src="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/16px-Symbol_category_class.svg.png" decoding="async" width="16" height="16" class="mw-file-element" srcset="//upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/23px-Symbol_category_class.svg.png 1.5x, //upload.wikimedia.org/wikipedia/en/thumb/9/96/Symbol_category_class.svg/31px-Symbol_category_class.svg.png 2x" data-file-width="180" data-file-height="185" /></span></span> Categories <ul><li><a href="/wiki/Category:Apocalypticism" title="Category:Apocalypticism">Apocalypticism</a></li> <li><a href="/wiki/Category:Future_problems" title="Category:Future 
problems">Future problems</a></li> <li><a href="/wiki/Category:Hazards" title="Category:Hazards">Hazards</a></li> <li><a href="/wiki/Category:Risk_analysis" title="Category:Risk analysis">Risk analysis</a></li> <li><a href="/wiki/Category:Doomsday_scenarios" title="Category:Doomsday scenarios">Doomsday scenarios</a></li></ul></li></ul> </div></td></tr></tbody></table></div> <!-- NewPP limit report Parsed by mw‐api‐int.codfw.main‐6bcd787d7f‐4pm4b Cached time: 20241126014230 Cache expiry: 2592000 Reduced expiry: false Complications: [vary‐revision‐sha1, show‐toc] CPU time usage: 0.832 seconds Real time usage: 0.978 seconds Preprocessor visited node count: 3309/1000000 Post‐expand include size: 167627/2097152 bytes Template argument size: 997/2097152 bytes Highest expansion depth: 9/100 Expensive parser function count: 13/500 Unstrip recursion depth: 1/20 Unstrip post‐expand size: 195459/5000000 bytes Lua time usage: 0.538/10.000 seconds Lua memory usage: 6191565/52428800 bytes Number of Wikibase entities loaded: 0/400 --> <!-- Transclusion expansion time report (%,ms,calls,template) 100.00% 817.037 1 -total 47.77% 390.274 2 Template:Reflist 18.09% 147.771 11 Template:Cite_web 12.02% 98.213 1 Template:Artificial_intelligence 11.69% 95.550 1 Template:Sidebar_with_collapsible_lists 10.21% 83.421 1 Template:Short_description 10.07% 82.289 4 Template:Navbox 9.80% 80.097 1 Template:Excerpt 7.84% 64.023 11 Template:Cite_book 7.61% 62.159 13 Template:Cite_news --> <!-- Saved in parser cache with key enwiki:pcache:idhash:813176-0!canonical and timestamp 20241126014230 and revision id 1259605660. 
Rendering was triggered because: api-parse --> </div><!--esi <esi:include src="/esitest-fa8a495983347898/content" /> --><noscript><img src="https://login.wikimedia.org/wiki/Special:CentralAutoLogin/start?type=1x1" alt="" width="1" height="1" style="border: none; position: absolute;"></noscript> <div class="printfooter" data-nosnippet="">Retrieved from "<a dir="ltr" href="https://en.wikipedia.org/w/index.php?title=AI_takeover&amp;oldid=1259605660">https://en.wikipedia.org/w/index.php?title=AI_takeover&amp;oldid=1259605660</a>"</div></div> <div id="catlinks" class="catlinks" data-mw="interface"><div id="mw-normal-catlinks" class="mw-normal-catlinks"><a href="/wiki/Help:Category" title="Help:Category">Categories</a>: <ul><li><a href="/wiki/Category:Doomsday_scenarios" title="Category:Doomsday scenarios">Doomsday scenarios</a></li><li><a href="/wiki/Category:Future_problems" title="Category:Future problems">Future problems</a></li><li><a href="/wiki/Category:Science_fiction_themes" title="Category:Science fiction themes">Science fiction themes</a></li><li><a href="/wiki/Category:Existential_risk_from_artificial_general_intelligence" title="Category:Existential risk from artificial general intelligence">Existential risk from artificial general intelligence</a></li><li><a href="/wiki/Category:Technophobia" title="Category:Technophobia">Technophobia</a></li></ul></div><div id="mw-hidden-catlinks" class="mw-hidden-catlinks mw-hidden-cats-hidden">Hidden categories: <ul><li><a href="/wiki/Category:Webarchive_template_wayback_links" title="Category:Webarchive template wayback links">Webarchive template wayback links</a></li><li><a href="/wiki/Category:Articles_with_short_description" title="Category:Articles with short description">Articles with short description</a></li><li><a href="/wiki/Category:Short_description_is_different_from_Wikidata" title="Category:Short description is different from Wikidata">Short description is different from Wikidata</a></li><li><a 
href="/wiki/Category:Articles_with_excerpts" title="Category:Articles with excerpts">Articles with excerpts</a></li></ul></div></div> </div> </main> </div> <div class="mw-footer-container"> <footer id="footer" class="mw-footer" > <ul id="footer-info"> <li id="footer-info-lastmod"> This page was last edited on 26 November 2024, at 01:42<span class="anonymous-show">&#160;(UTC)</span>.</li> <li id="footer-info-copyright">Text is available under the <a href="/wiki/Wikipedia:Text_of_the_Creative_Commons_Attribution-ShareAlike_4.0_International_License" title="Wikipedia:Text of the Creative Commons Attribution-ShareAlike 4.0 International License">Creative Commons Attribution-ShareAlike 4.0 License</a>; additional terms may apply. By using this site, you agree to the <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Terms_of_Use" class="extiw" title="foundation:Special:MyLanguage/Policy:Terms of Use">Terms of Use</a> and <a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy" class="extiw" title="foundation:Special:MyLanguage/Policy:Privacy policy">Privacy Policy</a>. 
Wikipedia® is a registered trademark of the <a rel="nofollow" class="external text" href="https://wikimediafoundation.org/">Wikimedia Foundation, Inc.</a>, a non-profit organization.</li> </ul> <ul id="footer-places"> <li id="footer-places-privacy"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Privacy_policy">Privacy policy</a></li> <li id="footer-places-about"><a href="/wiki/Wikipedia:About">About Wikipedia</a></li> <li id="footer-places-disclaimers"><a href="/wiki/Wikipedia:General_disclaimer">Disclaimers</a></li> <li id="footer-places-contact"><a href="//en.wikipedia.org/wiki/Wikipedia:Contact_us">Contact Wikipedia</a></li> <li id="footer-places-wm-codeofconduct"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Universal_Code_of_Conduct">Code of Conduct</a></li> <li id="footer-places-developers"><a href="https://developer.wikimedia.org">Developers</a></li> <li id="footer-places-statslink"><a href="https://stats.wikimedia.org/#/en.wikipedia.org">Statistics</a></li> <li id="footer-places-cookiestatement"><a href="https://foundation.wikimedia.org/wiki/Special:MyLanguage/Policy:Cookie_statement">Cookie statement</a></li> <li id="footer-places-mobileview"><a href="//en.m.wikipedia.org/w/index.php?title=AI_takeover&amp;mobileaction=toggle_view_mobile" class="noprint stopMobileRedirectToggle">Mobile view</a></li> </ul> <ul id="footer-icons" class="noprint"> <li id="footer-copyrightico"><a href="https://wikimediafoundation.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><img src="/static/images/footer/wikimedia-button.svg" width="84" height="29" alt="Wikimedia Foundation" loading="lazy"></a></li> <li id="footer-poweredbyico"><a href="https://www.mediawiki.org/" class="cdx-button cdx-button--fake-button cdx-button--size-large cdx-button--fake-button--enabled"><img src="/w/resources/assets/poweredby_mediawiki.svg" alt="Powered by MediaWiki" width="88" height="31" 
loading="lazy"></a></li> </ul> </footer> </div> </div> </div> <div class="vector-settings" id="p-dock-bottom"> <ul></ul> </div><script>(RLQ=window.RLQ||[]).push(function(){mw.config.set({"wgHostname":"mw-web.codfw.main-694cf4987f-5frnp","wgBackendResponseTime":155,"wgPageParseReport":{"limitreport":{"cputime":"0.832","walltime":"0.978","ppvisitednodes":{"value":3309,"limit":1000000},"postexpandincludesize":{"value":167627,"limit":2097152},"templateargumentsize":{"value":997,"limit":2097152},"expansiondepth":{"value":9,"limit":100},"expensivefunctioncount":{"value":13,"limit":500},"unstrip-depth":{"value":1,"limit":20},"unstrip-size":{"value":195459,"limit":5000000},"entityaccesscount":{"value":0,"limit":400},"timingprofile":["100.00% 817.037 1 -total"," 47.77% 390.274 2 Template:Reflist"," 18.09% 147.771 11 Template:Cite_web"," 12.02% 98.213 1 Template:Artificial_intelligence"," 11.69% 95.550 1 Template:Sidebar_with_collapsible_lists"," 10.21% 83.421 1 Template:Short_description"," 10.07% 82.289 4 Template:Navbox"," 9.80% 80.097 1 Template:Excerpt"," 7.84% 64.023 11 Template:Cite_book"," 7.61% 62.159 13 Template:Cite_news"]},"scribunto":{"limitreport-timeusage":{"value":"0.538","limit":"10.000"},"limitreport-memusage":{"value":6191565,"limit":52428800},"limitreport-logs":"table#1 {\n [\"size\"] = \"tiny\",\n}\n"},"cachereport":{"origin":"mw-api-int.codfw.main-6bcd787d7f-4pm4b","timestamp":"20241126014230","ttl":2592000,"transientcontent":false}}});});</script> <script type="application/ld+json">{"@context":"https:\/\/schema.org","@type":"Article","name":"AI takeover","url":"https:\/\/en.wikipedia.org\/wiki\/AI_takeover","sameAs":"http:\/\/www.wikidata.org\/entity\/Q2254427","mainEntity":"http:\/\/www.wikidata.org\/entity\/Q2254427","author":{"@type":"Organization","name":"Contributors to Wikimedia projects"},"publisher":{"@type":"Organization","name":"Wikimedia Foundation, 
Inc.","logo":{"@type":"ImageObject","url":"https:\/\/www.wikimedia.org\/static\/images\/wmf-hor-googpub.png"}},"datePublished":"2004-07-12T23:34:47Z","dateModified":"2024-11-26T01:42:24Z","image":"https:\/\/upload.wikimedia.org\/wikipedia\/commons\/d\/d9\/Capek_RUR.jpg","headline":"hypothetical scenario in which AI becomes the dominant form of intelligence on Earth"}</script> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10