PAI’s Responsible Practices for Synthetic Media
A Framework for Collective Action
Partnership on AI’s (PAI) Responsible Practices for Synthetic Media is a framework on how to responsibly develop, create, and share synthetic media: the audiovisual content often generated or modified by AI.

Framework Supporters: Adobe, BBC, Bumble, CBC, Code for Africa, D-ID, Google, HAI, Meedan, Meta, Microsoft, OpenAI, Respeecher, Synthesia, Thorn, TikTok, Truepic, and WITNESS.

The Need for Guidance

Over the past few years, we’ve seen AI-generated content move from experimental research in computer science labs to one of the engines of digital content creation.

Audio · Visual · Multimodal

Synthetic media provides significant responsible and creative opportunities across society. However, it can also cause harm. As the technology becomes increasingly accessible and sophisticated, its potential for harm, as well as for responsible and beneficial use, increases. As this field matures, synthetic media creators, distributors, publishers, and tool developers need to agree on and follow best practices.

With the Framework, AI experts and industry leaders at the intersection of information, media, and technology are coming together to take action for the public good. This diverse coalition has worked together for over a year to create a shared set of values, tactics, and practices to help creators and distributors use this powerful technology responsibly as it evolves.

Video: https://www.youtube.com/watch?v=SvpqX8obTaM

Three Categories of Stakeholders

PAI’s Responsible Practices for Synthetic Media offers recommendations for three categories of stakeholders contributing to the societal impact of synthetic media:
- Builders of technology and infrastructure
- Creators
- Distributors and publishers

Three Key Techniques

Based on the core concepts of consent, disclosure, and transparency, the Framework outlines key techniques for developing, creating, and sharing synthetic media responsibly.

Along with stakeholder-specific recommendations, the Framework asks organizations to:
- Collaborate to help counter the harmful use of synthetic media
- Further identify responsible and harmful uses of synthetic media
- Pursue specific mitigation strategies when synthetic media is used to cause harm
A Living Document

PAI’s Responsible Practices for Synthetic Media is a living document. While it is grounded in existing norms and practices, it will evolve to reflect new technology developments, use cases, and stakeholders. Responsible synthetic media infrastructure development, creation, and distribution are emerging areas that change quickly, requiring flexibility and calibration over time. PAI plans to conduct a yearly review of the Framework and to enable a review trigger at any time as called for by the AI and Media Integrity Steering Committee (https://partnershiponai.org/program/ai-media-integrity/).

Read the Framework

Introduction

The Partnership on AI’s (PAI) Responsible Practices for Synthetic Media is a set of recommendations to support the responsible development and deployment of synthetic media.

These practices are the result of feedback from more than 100 global stakeholders and build on PAI’s work over the past four years with representatives from industry, civil society, media/journalism, and academia.

With this Framework, we seek to:
1. Advance understanding of how to realize synthetic media’s benefits responsibly, building consensus and community around best practices for key stakeholders from industry, media/journalism, academia, and civil society
2. Offer guidance for both emerging and larger players in the field of synthetic media
3. Align on norms and practices to reduce redundancy and help advance responsible practice broadly across industry and society, avoiding a race to the bottom
4. Ensure that there is a document, and an associated community, that are both useful and able to adapt to developments in a nascent and rapidly changing space
5. Serve as a complement to other standards and policy efforts around synthetic media, including internationally

Governance and Involvement

The intended stakeholder audiences are those building synthetic media technology and tools, and those creating, sharing, and publishing synthetic media.

Several of these stakeholders will launch PAI’s Responsible Practices for Synthetic Media, formally joining this effort. These organizations will:
1. Participate in the PAI community of practice
2. Contribute a yearly case example or analysis that explores the Framework in technology or product practice

PAI will not be auditing or certifying organizations. This Framework includes suggested practices developed as guidance.

PAI’s Responsible Practices for Synthetic Media is a living document. While it is grounded in existing norms and practices, it will evolve to reflect new technology developments, use cases, and stakeholders. Responsible synthetic media infrastructure development, creation, and distribution are emerging areas that change quickly, requiring flexibility and calibration over time. PAI plans to conduct a yearly review of the Framework and to enable a review trigger at any time as called for by the AI and Media Integrity Steering Committee (https://partnershiponai.org/program/ai-media-integrity/).

The Framework’s Focus

Synthetic media presents significant opportunities for responsible use, including for creative purposes. However, it can also cause harm. As synthetic media technology becomes more accessible and sophisticated, its potential impact also increases. This applies to both positive and negative possibilities, examples of which we only begin to explore in this Framework. The Framework focuses on how best to address the risks synthetic media can pose while ensuring its benefits can be realized responsibly.

Further, while the ethical implications of synthetic media are vast, implicating areas such as copyright, the future of work, and even the meaning of art, the goal of this document is to target an initial set of stakeholder groups, identified by the PAI AI and Media Integrity community, that can play a meaningful role in: (a) reducing the potential harms associated with abuses of synthetic media and promoting responsible uses, (b) increasing transparency, and (c) enabling audiences to better identify and respond to synthetic media.

For more information on the creation, goals, and continued development of PAI’s Responsible Practices for Synthetic Media, see the FAQ.

Download the Framework: https://partnershiponai.org/download/7636/?tmstv=1677282001
Spanish version: https://partnershiponai.org/download/9857/?tmstv=1697647553
French version: https://partnershiponai.org/download/9860/?tmstv=1697647670

PAI’s Responsible Practices for Synthetic Media

Those building technology and infrastructure for synthetic media, creating synthetic media, and distributing or publishing synthetic media will seek to advance ethical and responsible behavior.

Here, synthetic media, also referred to as generative media, is defined as visual, auditory, or multimodal content that has been generated or modified (commonly via artificial intelligence). Such outputs are often highly realistic, would not be identifiable as synthetic to the average person, and may simulate artifacts, persons, or events. See Appendix A for more information on the Framework’s scope.

PAI offers recommendations for different categories of stakeholders with regard to their roles in developing, creating, and distributing synthetic media. These categories are not mutually exclusive. A given stakeholder could fit within several categories, as in the case of social media platforms.
These categories include:
1. Those building technology and infrastructure for synthetic media
2. Those creating synthetic media
3. Those distributing and publishing synthetic media

Section 1: Practices for Enabling Ethical and Responsible Use of Synthetic Media

1. Collaborate to advance research, technical solutions, media literacy initiatives, and policy proposals to help counter the harmful uses of synthetic media. We note that synthetic media can be deployed responsibly or can be harnessed to cause harm.

Responsible categories of use may include, but are not limited to:
- Entertainment
- Art
- Satire
- Education
- Research

2. Conduct research and share best practices to further develop categories of responsible and harmful uses of synthetic media.

These uses often involve gray areas (https://cocreationstudio.mit.edu/just-joking/), and techniques for navigating these gray areas are described in the sections below.

3. When the techniques below are deployed to create and/or distribute synthetic media in order to cause harm (see examples of harm in Appendix B), pursue reasonable mitigation strategies, consistent with the methods described in Sections 2, 3, and 4.

The following techniques can be deployed responsibly or to cause harm:
- Representing any person, company, media organization, government body, or entity
- Creating realistic fake personas
- Representing a specific individual as having acted, behaved, or made statements in a manner in which the real individual did not
- Representing events or interactions that did not occur
- Inserting synthetically generated artifacts into, or removing authentic ones from, authentic media
- Generating wholly synthetic scenes or soundscapes

For examples of how these techniques can be deployed to cause harm and an explicit, nonexhaustive list of harmful impacts, see Appendix B.

Section 2: Practices for Builders of Technology and Infrastructure

Those building and providing technology and infrastructure for synthetic media can include: B2B and B2C toolmakers; open-source developers; academic researchers; synthetic media startups, including those providing the infrastructure for hobbyists to create synthetic media; social media platforms; and app stores.

4. Be transparent to users about tools’ and technologies’ capabilities, functionality, limitations, and the potential risks of synthetic media.

5. Take steps to provide disclosure mechanisms for those creating and distributing synthetic media.

Disclosure can be direct and/or indirect, depending on the use case and context (https://firstdraftnews.org/long-form-article/there-are-lots-of-ways-to-label-ai-content-but-what-are-the-risks/):
- Direct disclosure is viewer- or listener-facing and includes, but is not limited to, content labels, context notes, watermarking, and disclaimers.
- Indirect disclosure is embedded and includes, but is not limited to, applying cryptographic provenance to synthetic outputs (such as the C2PA standard, https://c2pa.org/), applying traceable elements to training data and outputs, synthetic media file metadata, synthetic media pixel composition, and single-frame disclosure statements in videos.

6. When developing code and datasets, training models, and applying software for the production of synthetic media, make best efforts to apply indirect disclosure elements (steganographic, media provenance, or otherwise) within the respective assets and stages of synthetic media production.

Aim to disclose in a manner that mitigates speculation about content, strives toward resilience to manipulation or forgery, is accurately applied, and, when necessary, communicates uncertainty without furthering speculation. (Note: The ability to add durable disclosure to synthetic media is an open challenge where research is ongoing.)

7. Support additional research to shape future data-sharing initiatives and determine what types of data would be most appropriate and beneficial to collect and report, while balancing considerations such as transparency and privacy preservation.

8. Take steps to research, develop, and deploy technologies that:
- Are as forensically detectable as possible for manipulation, without stifling innovation in photorealism.
- Retain durable disclosure of synthesis, such as watermarks or cryptographically bound provenance, that is discoverable, preserves privacy, and is made readily available to the broader community and provided open source.

9. Provide a published, accessible policy outlining the ethical use of your technologies and the use restrictions that users will be expected to adhere to and providers will seek to enforce.
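As one concrete illustration of the indirect disclosure described in practices 5, 6, and 8, the sketch below shows a minimal way a generation tool might attach machine-readable synthesis metadata to an output file: hash the bytes and write an unsigned JSON sidecar next to them. This is an assumption-laden example, not the C2PA format and not part of the Framework; the file names, field names, and tool identifiers ("ExampleTool", "example-model-v1") are hypothetical, and it uses only the Python standard library.

```python
"""
Illustrative sketch only: a minimal, hypothetical "indirect disclosure" record
written alongside a generated file. This is NOT the C2PA format; real deployments
would use a standard such as C2PA with cryptographic signing rather than an
unsigned JSON sidecar.
"""
import hashlib
import json
from datetime import datetime, timezone
from pathlib import Path


def write_disclosure_sidecar(media_path: str, generator: str, model: str) -> Path:
    """Hash the media file and record basic synthesis metadata next to it."""
    media = Path(media_path)
    digest = hashlib.sha256(media.read_bytes()).hexdigest()

    manifest = {
        "content_sha256": digest,   # binds the record to these exact bytes
        "is_synthetic": True,       # the disclosure itself
        "generator": generator,     # hypothetical tool/service name
        "model": model,             # hypothetical model identifier
        "created_utc": datetime.now(timezone.utc).isoformat(),
    }

    sidecar = media.parent / (media.name + ".disclosure.json")
    sidecar.write_text(json.dumps(manifest, indent=2))
    return sidecar


if __name__ == "__main__":
    # Self-contained demo: write a placeholder "generated" file, then disclose it.
    demo = Path("demo_output.png")
    demo.write_bytes(b"\x89PNG placeholder bytes")
    print(write_disclosure_sidecar(str(demo), "ExampleTool", "example-model-v1"))
```

A sidecar like this keeps the original bytes untouched, which makes hashing simple, but it is also easy to strip or separate from the file; embedded, signed provenance or watermarking is more durable, which is why practice 8 emphasizes durable disclosure and why the note to practice 6 calls this an open research challenge.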
Accordingly, these stakeholder examples are illustrative but not exhaustive.</p> <ol start="10"> <li><span style="font-weight: 600;">Be transparent </span>to content consumers about:</li> </ol> <ul> <li aria-level="1">How you received <span style="font-weight: 600;">informed consent</span> from the subject(s) of a piece of manipulated content, appropriate to product and context, except when used toward reasonable artistic, satirical, or expressive ends.</li> <li aria-level="1">How you think about the ethical use of technology and use restrictions (e.g., through a <span style="font-weight: 600;">published</span>, accessible policy, on your website, or in posts about your work) and consult these guidelines before creating synthetic media.</li> <li aria-level="1">The capabilities, limitations, and potential risks of synthetic content.</li> </ul> <ol start="11"> <li><span style="font-weight: 600;">Disclose </span>when the media you have created or introduced includes synthetic elements, especially when failure to know about synthesis changes the way the content is perceived. Take advantage of any disclosure tools provided by those building technology and infrastructure for synthetic media.</li> </ol> <p class="indent">Disclosure can be <span style="font-weight: 600;">direct and/or indirect</span>, depending on <a href="https://firstdraftnews.org/long-form-article/there-are-lots-of-ways-to-label-ai-content-but-what-are-the-risks/">the use case and context</a>:</p> <ul> <li aria-level="1">Direct disclosure is viewer or listener-facing and includes, but is not limited to, <a href="https://firstdraftnews.org/long-form-article/from-deepfakes-to-tiktok-filters-how-do-you-label-ai-content/">content labels</a>, context notes, watermarking, and disclaimers.</li> <li aria-level="1">Indirect disclosure is embedded and includes, but is not limited to, applying cryptographic provenance to synthetic outputs (such as <a href="https://c2pa.org/">the C2PA open standard</a>), applying traceable elements to training data and outputs, synthetic media file metadata, synthetic media pixel composition, and single-frame disclosure statements in videos.</li> </ul> <p class="indent">Aim to disclose in a manner that mitigates speculation about content, strives toward resilience to manipulation or forgery, is accurately applied, and also, when necessary, communicates uncertainty without furthering speculation.</p> <h3 class="framework">Section 4:<br /> Practices for <span style="font-weight: 500;">Distributors and Publishers</span></h3> <p>Those distributing synthetic media include both institutions with active, editorial decision-making around content that mostly host first-party content and may distribute editorially created synthetic media and/or report on synthetic media created by others (i.e., media institutions, including broadcasters), and online platforms that have more passive displays of synthetic media and host user-generated or third-party content (i.e., social media platforms).</p> <h4>For both active and passive distribution channels</h4> <ol start="12"> <li><span style="font-weight: 600;">Disclose</span> when you confidently detect third-party/user-generated synthetic content.</li> </ol> <p class="indent">Disclosure can be <span style="font-weight: 600;">direct and/or indirect</span>, depending on <a href="https://firstdraftnews.org/long-form-article/there-are-lots-of-ways-to-label-ai-content-but-what-are-the-risks/">the use case and context</a>:</p> <ul> <li aria-level="1">Direct disclosure <a
href="https://partnershiponai.org/it-matters-how-platforms-label-manipulated-media-here-are-12-principles-designers-should-follow/">is viewer or listener-facing</a>, and includes, but is not limited to, <a href="https://firstdraftnews.org/long-form-article/there-are-lots-of-ways-to-label-ai-content-but-what-are-the-risks/">content labels</a>, context notes, watermarking, and disclaimers.</li> <li aria-level="1">Indirect disclosure is embedded and includes, but is not limited to, applying cryptographic provenance (such as <a href="https://c2pa.org/">the C2PA open standard</a>) to synthetic outputs, applying traceable elements to training data and outputs, synthetic media file metadata, synthetic media pixel composition, and single-frame disclosure statements in videos.</li> </ul> <p class="indent">Aim to disclose in a manner that mitigates speculation about content, strives toward resilience to manipulation or forgery, is accurately applied, and also, when necessary, communicates uncertainty without furthering speculation.</p> <ol start="13"> <li><span style="font-weight: 600;">Provide a</span> published, accessible<span style="font-weight: 600;"> policy</span> outlining the organization’s approach to synthetic media that you will adhere to and seek to enforce.</li> </ol> <h4>For active distribution channels</h4> <p style="font-style: italic;">Channels (such as media institutions) that mostly host first-party content and may distribute editorially created synthetic media and/or report on synthetic media created by others.</p> <ol class="framework" start="14"> <li><span style="font-weight: 600;">Make prompt adjustments</span> when you realize you have unknowingly distributed and/or represented harmful synthetic content.</li> </ol> <ol start="15"> <li><span style="font-weight: 600;">Avoid distributing unattributed </span>synthetic media<span style="font-weight: 600;"> content</span> or reporting on harmful synthetic media created by others without clear labeling and context to ensure that no reasonable viewer or reader could mistake it for non-synthetic media.</li> </ol> <ol start="16"> <li><span style="font-weight: 600;">Work towards </span>organizational <span style="font-weight: 600;">content provenance</span> infrastructure for both non-synthetic and synthetic media, while respecting privacy (for example, through the <a href="https://c2pa.org/">C2PA open standard</a>).</li> </ol> <ol start="17"> <li><span style="font-weight: 600;">Ensure that transparent and informed consent </span>has been provided by <span style="font-weight: 600;">the creator and the subject(s) depicted </span>in the synthetic content that will be shared and distributed, even if you have already received consent for content creation.</li> </ol> <h4>For passive distribution channels</h4> <p style="font-style: italic;">Channels (such as platforms) that mostly host third-party content.</p> <ol start="18"> <li><span style="font-weight: 600;">Identify </span>harmful synthetic media being distributed on platforms by implementing reasonable technical methods, user reporting, and staff measures for doing so.</li> </ol> <ol start="19"> <li><span style="font-weight: 600;">Make prompt adjustments</span> via labels, downranking, removal, or other interventions like those <a href="https://partnershiponai.org/intervention-inventory/">described here</a>, when harmful synthetic media is known to be distributed on the platform.</li> </ol> <ol start="20"> <li><span style="font-weight: 600;">Clearly communicate</span> and <span style="font-weight: 600;">educate</span> platform users about synthetic media and what kinds of synthetic content are permissible to create and/or share on the platform.</li> </ol> <h3 class="framework">Appendices</h3> <div class="simple-accordion"> <div id="appendix-a" class="accordion-item"> <div class="accordion-header">Appendix A: PAI’s Responsible Practices for Synthetic Media Scope</div> <div class="accordion-content"> <p>While this Framework focuses on highly realistic forms of synthetic media, it recognizes that the threshold for what is deemed highly realistic may vary based on an audience’s media literacy and across global contexts. We also recognize that harms can still be caused by synthetic media that is not highly realistic, such as in the context of intimate image abuse. This Framework has been created with a focus on audiovisual synthetic media, otherwise known as generative media, rather than synthetic text, which presents different benefits and risks. However, it may still provide useful guidance for the creation and distribution of synthetic text.</p> <p>Additionally, this Framework only covers generative media, not the broader category of generative AI as a whole. We recognize that these terms are sometimes treated as interchangeable.</p> <p>Synthetic media is not inherently harmful, but the technology is increasingly accessible and sophisticated, magnifying potential harms and opportunities. As the technology develops, we will seek to revisit this Framework and adapt it to technological shifts (e.g., immersive media experiences).</p> </div> </div> <div id="appendix-b" class="accordion-item"> <div class="accordion-header">Appendix B: Potential Harms of Synthetic Media</div> <div class="accordion-content"> <p>List of potential harms from synthetic media we seek to mitigate:</p> <ul> <li aria-level="1">Impersonating an individual to gain unauthorized information or privileges</li> <li aria-level="1">Making unsolicited phone calls, bulk communications, posts, or messages that deceive or harass</li> <li aria-level="1">Committing fraud for financial gain</li> <li aria-level="1">Disinformation about an individual, group, or organization</li> <li aria-level="1">Exploiting or manipulating children</li> <li aria-level="1">Bullying and harassment</li> <li aria-level="1">Espionage</li> <li aria-level="1">Manipulating democratic and political processes, including deceiving a voter into voting for or against a candidate, damaging a candidate’s reputation by providing false statements or acts, influencing the outcome of an election via deception, or suppressing voters</li> <li aria-level="1">Market manipulation and corporate sabotage</li> <li aria-level="1">Creating or inciting hate speech, discrimination, defamation, terrorism, or acts of violence</li> <li aria-level="1">Defamation and reputational sabotage</li> <li aria-level="1">Non-consensual intimate or sexual content</li> <li aria-level="1">Extortion and blackmail</li> <li aria-level="1">Creating new identities and accounts at scale to represent unique people in order to “manufacture public opinion”</li> </ul> </div> </div> </div> </div> <p><!-- end framework box --></p> <h2 style="margin-bottom: 1em;"></h2> </div> </div> <div id="learn_more" class="card contract text-white" style="background-color: #0f3d99"> <div class="card-header">Learn More</div> <div class="card-body"> <h2 id="learn" style="padding-bottom: 2em;">Learn More</h2> <div class="bluebox"> <h3>What Is Synthetic
Media?</h3> <p>Synthetic media, also referred to as generative media, is visual, auditory, or multimodal content that has been artificially generated or modified (commonly through artificial intelligence). Such outputs are often highly realistic, would not be identifiable as synthetic to the average person, and may simulate artifacts, persons, or events.</p> </div> <h3 style="margin: 1em 0em -0.5em 0em;">Part 1: Framing the Responsible Practices</h3> <div class="simple-accordion"> <div class="accordion-item"> <div class="accordion-header">What is PAI’s Responsible Practices for Synthetic Media?</div> <div class="accordion-content"> <p><i>PAI’s Responsible Practices for Synthetic Media: A Framework for Collective Action</i> is a set of recommendations to support the responsible development and deployment of synthetic media. The intended audiences are those creating synthetic media technology and tools or creating, sharing, and publishing synthetic media content. The Framework <a href="https://partnershiponai.org/workstream/synthetic-and-manipulated-content/" target="_blank" rel="noopener">builds on PAI’s work over the past four years</a> with industry, civil society, media/journalism, and academia to evaluate the challenges and opportunities for synthetic media.</p> </div> </div> <div class="accordion-item"> <div class="accordion-header">What are the Framework’s goals?</div> <div class="accordion-content"> <ol> <li aria-level="1">Advance understanding of how to realize synthetic media’s benefits responsibly, building consensus and community around best practices for key stakeholders from industry, media/journalism, academia, and civil society</li> <li aria-level="1">Offer guidance to both emerging and larger players in the field of synthetic media</li> <li aria-level="1">Align on norms/practices to reduce redundancy and help advance responsible practice broadly across industry and society, avoiding a race to the bottom</li> <li aria-level="1">Ensure that there is a document and associated community that are both useful and can adapt to developments in a nascent and rapidly changing space</li> <li aria-level="1">Serve as a complement to other standards and policy efforts around synthetic media, including internationally</li> </ol> <p>What PAI is <b>not</b> doing:</p> <ol style="margin-top: -0.6em;"> <li aria-level="1">Auditing or certifying organizations</li> </ol> </div> </div> <div class="accordion-item"> <div class="accordion-header">How should I understand this document?</div> <div class="accordion-content"> <p>Think of this document like a constitution, not a set of laws. We provide recommendations to ensure that the emerging space of responsible synthetic media has a set of values, tactics, and practices to explore and evaluate. This document reflects the fact that responsible synthetic media (and its associated infrastructure development, creation, and distribution) is an emerging area with fast-moving developments requiring flexibility and calibration over time.</p> </div> </div> <div class="accordion-item"> <div class="accordion-header">What is the Framework’s main focus?</div> <div class="accordion-content"> <p>Synthetic media presents significant opportunities for responsible use, including for creative purposes. However, it can also cause harm. As synthetic media technology becomes more accessible and sophisticated, its potential impact also increases. This applies to both positive and negative possibilities — examples of which we only begin to explore in this Framework.
The Framework focuses on how best to address the risks synthetic media can pose while ensuring its benefits can be realized in a responsible way.</p> <p>We recognize, however, that many institutions collaborating with us are explicitly working in the creative and responsible content categories. In the Framework, we include a list of harmful and responsible content categories, and we explicitly state that this list is not exhaustive, that it often includes gray areas, and that specific elements of the Framework apply to responsible use cases as well.</p> </div> </div> <div class="accordion-item"> <div class="accordion-header">What type of synthetic media does the Framework focus on?</div> <div class="accordion-content"> <p>This Framework has been created with a focus on visual, auditory, or multimodal content that has been generated or modified (commonly via artificial intelligence). Such outputs are often highly realistic, would not be identifiable as synthetic to the average person, and may simulate artifacts, persons, or events. However, the Framework may still provide useful guidance for the creation and distribution of synthetic text.</p> <p>Additionally, this Framework focuses on highly realistic forms of synthetic media, but recognizes that the threshold for what is deemed highly realistic may vary based on an audience’s media literacy and across global contexts. We also recognize that harms can still be caused by synthetic media that is not highly realistic, such as in the context of intimate image abuse. In addition, this Framework only covers generative media, not the broader category of generative AI as a whole. We recognize that these terms are sometimes treated as interchangeable.</p> </div> </div> </div> <h3 style="margin: 2em 0em -0.5em 0em;">Part 2: Involvement in the Framework</h3> <div class="simple-accordion"> <div class="accordion-item"> <div class="accordion-header">Who has been involved in creating the Framework?</div> <div class="accordion-content">PAI has worked with more than 50 organizations — including synthetic media startups, social media platforms, news organizations, advocacy and human rights groups, academic institutions, policy professionals, experiential experts, and public commenters — to refine the Framework. With our field-wide expertise and perspective, PAI led the iterative, multistakeholder process and was the primary arbiter of the Framework’s language.</div> </div> <div class="accordion-item"> <div class="accordion-header">How can my organization get involved?</div> <div class="accordion-content">Organizations interested in becoming Framework partners can register their interest by <a href="https://partnershiponai.org/workstream/synthetic-and-manipulated-content/#getinvolved">filling out this form</a>.</div> </div> <div class="accordion-item"> <div class="accordion-header">What is expected from Framework supporters?</div> <div class="accordion-content"> <ul> <li aria-level="1"><b>Joining/Continuing Participation in the Framework Community of Practice.
<span style="font-weight: 400;">Agreement to join a synthetic media community of good-faith actors working to develop and deploy responsible synthetic media while learning together about this emerging technology, facilitated by PAI</span></b>.</li> <li aria-level="1"><b>Transparency via Case Contribution.</b> Commitment to explore case examples or analysis related to the application of the Framework with the PAI synthetic media community — through a pilot and/or reporting of a case example via an annual public reporting process.</li> <li aria-level="1"><b>Convening Participation. </b>Agreement to participate in one to two programmatic convenings in 2023 evaluating the Framework’s use for real-world case examples and evolution of the synthetic media field. These are an opportunity to share about learnings from applying the Framework with others in the community.</li> </ul> </div> </div> <div class="accordion-item"> <div class="accordion-header">What process did PAI take to get to the final Framework?</div> <div class="accordion-content"> <p>PAI developed the Responsible Practices for Synthetic Media from January 2022 to January 2023, through:</p> <ul> <li aria-level="1">Bilateral meetings with stakeholders</li> <li aria-level="1">Public comment submissions</li> <li aria-level="1">Meetings with the AI and Media Integrity Steering Committee (every two weeks)</li> <li aria-level="1">Meetings with the Framework Working Group (every two weeks)</li> <li aria-level="1">Program Meetings with the AI and Media Integrity Program Members (three meetings)</li> <li aria-level="1">Additional convenings with the DARPA/NYU Computational Disinformation Working Group and a Synthetic Media Startup Cohort</li> </ul> <p style="text-align: center;"><img decoding="async" class="alignnone size-full wp-image-7631" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/02/process-timeline-new.svg" alt="" /><br /> <em>Development timeline, 2022</em></p> </div> </div> </div> <h3 style="margin: 2em 0em -0.5em 0em;">Part 3: The Framework as a Living Document</h3> <div class="simple-accordion"> <div class="accordion-item"> <div class="accordion-header">How are you ensuring that the Framework reflects that synthetic media is an emerging technology?</div> <div class="accordion-content"> <p>One of the expectations of Framework supporters is the submission of a case example, in which the organization reflects on how the Framework can be applied to a synthetic media challenge it has faced or is currently facing. By collecting real-world examples of use cases to pressure test the Framework against, we can see how the Framework principles stand up against technological advancements and public understanding of AI-generated and modified content.</p> </div> </div> <div class="accordion-item"> <div class="accordion-header">How will case studies complement the Framework?</div> <div class="accordion-content"> <p>Those that join the Framework effort will explore case examples or analysis related to the application of its recommendations as part of the Framework Community of Practice. Over the course of each year, PAI will host convenings where the community applies the Framework to these cases, as well as additional public cases identified by PAI staff. The 11 <a href="https://syntheticmedia.partnershiponai.org/#case_studies">case studies</a> we published in March 2024 provide industry, policy makers, and the general public with a shared body of case material that puts the Framework into practice. 
These case studies allow us to pressure test the Framework and to further operationalize its recommendations via multistakeholder input, especially when applied to gray areas. The case studies also provide us with opportunities to identify what areas of the Framework can be improved upon to better inform audiences.</p> </div> </div> <div class="accordion-item"> <div class="accordion-header">Have you shared the Framework with individuals in government? How does this connect to public policy?</div> <div class="accordion-content"> <p>Although regulation and government policy are emerging in the synthetic media space, the Framework exemplifies a type of norm development and public commitment that can help to strengthen the connection between policies, entities, and industries that are relevant to responsible synthetic media. While we have intentionally limited the involvement of policymakers in <i>drafting</i> the Framework, we have thought about its development as a complement to existing and forthcoming regulation, as well as intergovernmental and organizational policies on AI, mis/disinformation, and synthetic and generative media. For example, we have thought about the Framework alongside the EU AI Act, the EU Code of Practice on Disinformation, as well as the launch of the Deepfake Task Force Act in the U.S. Following the launch of the Framework, we plan to engage the policy community working on and around AI, mis/disinformation, and synthetic and generative media policy, including through a policymaker roundtable on the Framework in 2023.</p> </div> </div> <div class="accordion-item"> <div class="accordion-header">I am an individual (e.g., researcher, advocate, interested citizen). How can I learn more or get involved?</div> <div class="accordion-content">At present, only institutions will join the Framework effort, but we will have designated opportunities for public input and be sharing details of progress with the public more broadly later in 2023.</div> </div> </div> <h3></h3> <h3 style="margin: 2em 0em -0.5em 0em;">Part 4: Development Process</h3> <div class="simple-accordion"> <div class="accordion-item"> <div class="accordion-header">How was the Framework Developed?</div> <div class="accordion-content"> <p>PAI worked with over 50 global institutions in a participatory, year-long drafting process to create the current Responsible Practices for Synthetic Media. Participating stakeholders included the broader AI and media integrity field of synthetic media startups, social media platforms, AI research organizations, advocacy and human rights groups, academic institutions, experiential experts, news organizations, and public commenters.</p> <p style="text-align: center;"><img decoding="async" class="alignnone size-full wp-image-7631" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/02/process-timeline-new.svg" alt="" /><br /> <em>Development timeline, 2022</em></p> </div> </div> <div class="accordion-item"> <div class="accordion-header">Framework Evolution and the Community of Practice</div> <div class="accordion-content"> <p>The Framework is not a static document, but a living one. You can think of the Framework like a constitution, and not a set of laws, providing the burgeoning generative AI space with a set of guidelines for ethical synthetic media. 
PAI will revise the Framework each year in order to reflect new technology developments, use cases, and stakeholders. Part of that evolution will be informed by case examples from the real-world institutions building, creating, and sharing synthetic media. Institutions that join the Responsible Practices for Synthetic Media will provide yearly reports or analysis on synthetic media cases and how the Framework can be explored in practice. These cases will be published and inform the evolution of synthetic media policymaking and AI governance.</p> </div> </div> </div> <h2 style="margin-bottom: 1em;"></h2> </div> </div> <div id="framework_supporters" class="card contract text-white" style="background-color: #1248b3"> <div class="card-header">Framework Supporters</div> <div class="card-body"> <h2 style="padding-bottom: 2em;">Framework Supporters</h2> <p>The Framework is supported by the following companies and organizations. Click to read their statements of support.</p> <p style="text-align: center; font-weight: 900; text-transform: uppercase; letter-spacing: 0.1em; margin-top: 3em;">February 2023 Launch Supporters</p> <p><!-- row 1 --></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- ADOBE --><br /> <!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#adobe"><img decoding="async" class="aligncenter wp-image-7812" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/adobe-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/adobe-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/adobe-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="adobe" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “Adobe launched the Content Authenticity Initiative (CAI) in 2019 to increase trust and transparency online. Since then, our membership has grown to over 900 leading media and tech companies, publishers, creators, and camera manufacturers working to address misinformation at scale through attribution. As synthetic media techniques become increasingly powerful, we are committed to advancing standards and frameworks that promote ethical creation and use of digital content.
We are excited to be involved in the PAI Framework and look forward to continuing to shape the future of responsible use of AI.”<br /> <strong>Andy Parsons</strong><br /> <em>Senior Director, Content Authenticity Initiative,</em><br /> <em>Adobe</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END ADOBE --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- BBC --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#bbc"><img decoding="async" class="aligncenter wp-image-7813" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/bbc-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/bbc-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/bbc-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a><br /> <!-- Modal --></p> <div id="bbc" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “The BBC, as a PAI partner, is pleased to have made a contribution to developing the Framework. Establishing principles for the responsible use of synthetic media has enormous value as many organisations grapple with its implications. As a public service broadcaster with a focus on trust and safety, we look forward to reflecting work in this area in our own editorial guidelines as appropriate and continuing to support and develop work in this area.”<br /> <strong>Jatin Aythora</strong><br /> <em>Director of Research and Development,</em><br /> <em>BBC Research & Development</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END BBC --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- BUMBLE --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#bumble"><img decoding="async" class="aligncenter wp-image-7814" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/bumble-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/bumble-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/bumble-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="bumble" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “We are steadfast advocates for safe spaces online for less represented voices. Our work with PAI on developing and joining the Framework, alongside an amazing group of partners, is an extension of that. 
We are especially optimistic about how we continue to show up to address the unique AI-enabled harms that affect women and marginalized voices.”<br /> <strong>Payton Iheme</strong><br /> <em>VP of Global Public Policy,</em><br /> <em>Bumble</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END BUMBLE --></p> </div> </div> <p><!-- row 2 --></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- CBC --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#cbc"><img loading="lazy" decoding="async" class="aligncenter wp-image-7816" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/cbc-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/cbc-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/cbc-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="cbc" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “CBC/Radio-Canada is delighted to work with the Partnership on AI on this new approach to synthetic media. As Canada’s national public broadcaster, we work hard to build, sustain, and safeguard the trust people put in our news and current affairs content. That’s why developing an international framework to promote transparency in the development and responsible use of synthetic media is so important.”<br /> <strong>Jon Medline</strong><br /> <em>Executive Director,</em><br /> <em>Policy & International Relations,</em><br /> <em>CBC/Radio-Canada</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END CBC --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- D-ID --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#d-id"><img loading="lazy" decoding="async" class="aligncenter wp-image-7818" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/d-id-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/d-id-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/d-id-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="d-id" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “Generative AI technology is extremely powerful, and it’s built-in to our DNA at D-ID to ensure that this power is used for good. 
But we don’t stand alone, it needs to be an industry-wide effort, which is why we are very proud to be part of this initiative to help drive best practice and move forward the ethical development and deployment of synthetic media across a wide range of industries.”<br /> <strong>Gil Perry</strong><br /> <em>CEO and Co-Founder</em><br /> <em>D-ID</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END D-ID --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- OPEN AI --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#openai"><img loading="lazy" decoding="async" class="aligncenter wp-image-7824" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/openai-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/openai-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/openai-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="openai" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “We’re seeing how human-machine interaction has sparked incredible creativity and expression, but any powerful technology needs careful deployment. These recommendations represent a necessary step towards society collectively working out ways to address the global challenges presented by AI progress, and we are pleased to take part in PAI’s efforts to guide the industry.”<br /> <strong>Dave Willner</strong><br /> <em>Head of Trust & Safety, </em><br /> <em>OpenAI</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END OPEN AI --></p> </div> </div> <p><!-- row 3 --></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- RESPEECHER --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#respeecher"><img loading="lazy" decoding="async" class="aligncenter wp-image-7825" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/respeecher-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/respeecher-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/respeecher-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="respeecher" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “We encourage the responsible use of AI technology, and it’s impossible without mutual efforts and dialogue among industry leaders. We must not only implement creative ways to democratize synthetic speech tech, but also find the most effective ways to control it. This is something Respeecher has been committed to since the company’s establishment— more than 5 years. We are happy to contribute to this effort. 
It is not just a privilege— it’s our responsibility.”<br /> <strong>Alex Serdiuk</strong><br /> <em>CEO and Co-founder,</em><br /> <em>Respeecher</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END RESPEECHER --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- SYNTHESIA --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#synthesia"><img loading="lazy" decoding="async" class="aligncenter wp-image-7826" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/synthesia-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/synthesia-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/synthesia-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="synthesia" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “The creative possibilities of AI are endless but like all powerful technology, it will be used by bad-faith actors. Reducing this harm is crucial. It’s key we work together as an industry to combat threats AI presents. We believe that education, transparency and measured regulatory interventions will allow everyone to safely benefit from the immense opportunity of AI-generated content whilst also enjoying the magic it has to offer.”<br /> <strong>Victor Riparbelli</strong><br /> <em>CEO,</em><br /> <em>Synthesia</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END SYNTHESIA --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- TIKTOK --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#tiktok"><img loading="lazy" decoding="async" class="aligncenter wp-image-7828" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/tiktok-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/tiktok-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/tiktok-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="tiktok" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “TikTok is built on the belief that trust and authenticity are necessary to foster safe, creative and joyful online communities, and we’re proud to support Partnership on AI’s Responsible Practices for Synthetic Media. Like many technologies, the advancement of synthetic media opens up both exciting creative opportunities as well as unique safety considerations. 
We look forward to collaborating with our industry to advance thoughtful synthetic media approaches that empower creative expression by increasing transparency and guarding against potential risks.”<br /> <strong> Chris Roberts</strong><br /> <em>Head of Integrity and Authenticity Policy,</em><br /> <em>TikTok</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END TIKTOK --></p> </div> </div> <p><!--Row 4--></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- EMPTY CELL --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- WITNESS --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#witness"><img loading="lazy" decoding="async" class="aligncenter wp-image-7830" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/witness-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/witness-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/witness-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="witness" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “At a time where synthetic media continues blurring the lines that separate truth from falsehood and reality from fiction, WITNESS works on ‘fortifying the truth’ based on the threats experienced and solutions identified by vulnerable and marginalized communities globally. Ensuring strong ethical boundaries are in place from the early phases of these technological developments brings us closer to solutions that can benefit civic journalists and human rights defenders worldwide. 
We’re delighted to partner with PAI to present this framework of responsible practices.”<br /> <strong>Jacobo Castellanos</strong><br /> <em>Technology, Threats and Opportunities Coordinator,</em><br /> <em> WITNESS</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END WITNESS --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- EMPTY CELL --></p> </div> </div> <p style="text-align: center; font-weight: 900; text-transform: uppercase; letter-spacing: 0.1em; margin-top: 5em;">Supporters</p> <p><!-- row 1 --></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- CODE FOR AFRICA --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#cfa"><img loading="lazy" decoding="async" class="aligncenter wp-image-7817" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/codeforafrica-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/codeforafrica-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/codeforafrica-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="cfa" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “As an African watchdog institution we can confirm that we’re already seeing bad actors use synthetic or other AI-manipulated media created on easily accessible commercial tools to drive disinformation campaigns in conflict zones like Burkina Faso and the DRC. PAI’s framework is therefore an important first step for helping those who build AI tools and those who use them to have a set of clear, simple ethical guidelines to minimize harm and fight against abuse of AI’s potential.”<br /> <strong>Justin Arenstein</strong><br /> <em>Co-founder and CEO,</em><br /> <em>Code for Africa</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END CODE FOR AFRICA --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- GOOGLE --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#google"><img loading="lazy" decoding="async" class="aligncenter wp-image-7819" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/google-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/google-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/google-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="google" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “Realizing AI’s many opportunities requires responsible practices from the start. We’re pleased to join the Partnership on AI’s Responsible Practices for Synthetic Media: A Framework for Collective Action to help develop and foster best practices across the industry. 
We’re taking a responsible approach to AI by helping people evaluate information online, and recently announced that we’ll soon be integrating watermarking, metadata, and other innovative techniques into our latest generative models.”<br /> <strong>Laurie Richardson</strong><br /> <em>VP, Trust and Safety,</em><br /> <em>Policy & International Relations,</em><br /> <em>Google</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END GOOGLE --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- HAI --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#hai"><img loading="lazy" decoding="async" class="aligncenter wp-image-7820" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/hai-white400x160.png" alt="" width="250" height="100" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/hai-white400x160.png 400w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/hai-white400x160-300x120.png 300w" sizes="(max-width: 250px) 100vw, 250px" /></a></p> <p><!-- Modal --></p> <div id="hai" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “We are excited to support this important initiative. Responsible development and use of synthetic media is crucial as these capabilities become more advanced and accessible. We look forward to collaborating with other organizations in this community and contributing to thoughtful conversations and scholarship to build democracy-affirming and human-centered technologies.”<br /> <strong>Russell Wald</strong><br /> <em>Managing Director for Policy & Society,</em><br /> <em>Stanford Institute for Human-Centered Artificial Intelligence</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END HAI --></p> </div> </div> <p><!-- row 2 --></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- MEEDAN --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#meedan"><img loading="lazy" decoding="async" class="aligncenter wp-image-7821 size-full" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/meedan-white400x160.png" alt="" width="250" height="160" /></a></p> <p><!-- Modal --></p> <div id="meedan" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “‘Is it real?’ is a frequent question asked of fact-checking organizations using Meedan’s software to run chatbots on messaging apps. The PAI Framework provides clear guidance on disclosure and transparency for the use of synthetic media that will help ensure responsible creators, publishers, and researchers don’t further contribute to the confusion of what is and isn’t real online.”<br /> <strong>Dr. Scott A. 
Hale</strong><br /> <em>Director of Research,</em><br /> <em>Meedan</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END MEEDAN --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- META --><br /> <!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#meta"><img decoding="async" class="wp-image-7689 size-full aligncenter" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/meta-white400x160.png" alt="" width="250" /></a></p> <p><!-- Modal --></p> <div id="meta" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “Meta is excited to join the cohort of supporters of Partnership on AI’s Responsible Practices for Synthetic Media and to work with PAI on developing this into a nuanced approach to educating people about generated media. We’re optimistic about the developments in this space and about using this technology to bring more tools for creative expression to our community.”<br /> <strong>Nick Clegg</strong><br /> <em>President, Global Affairs</em><br /> <em>Meta</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END META --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- MICROSOFT --><br /> <!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#microsoft"><img decoding="async" class="wp-image-7686 size-full aligncenter" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/microsoft-white400x160.png" alt="" width="250" /></a></p> <p><!-- Modal --></p> <div id="microsoft" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “Microsoft endorses the Partnership on AI’s framework for collective action and responsible practices for uses of generative AI to create synthetic media. We applaud and support PAI’s initiative to build a strong, collaborative community dedicated to protecting the public from malicious actors who aim to manipulate, sow discord, and to erode trust in the digital information we consume.”<br /> <strong>Eric Horvitz</strong><br /> <em>Chief Scientific Officer</em><br /> <em>Microsoft</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END MICROSOFT --></p> </div> </div> <p><!-- row 3 --></p> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- THORN --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#thorn"><img decoding="async" class="aligncenter size-full wp-image-7610" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/thorn-white400x160.png" alt="" width="250" /></a></p> <p><!-- Modal --></p> <div id="thorn" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “Generative AI represents a leap in how we create, with the potential to have significant implications on how we work and live. In the child safety ecosystem, generative AI is also evolving how child sexual abuse occurs online and how we combat it. 
In moments like these, building our future with eyes wide open is critical to ensure we build safely. Thorn is happy to support the Partnership on AI’s Responsible Practices for Synthetic Media Framework to advocate for the unique safety concerns of the child safety ecosystem.”<br /> <strong>Melissa Stroebel</strong><br /> <em>VP Research & Insights,</em><br /> <em>Thorn</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END THORN --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- TRUEPIC --></p> <p><!-- trigger --><br /> <a href="#" data-bs-toggle="modal" data-bs-target="#truepic"><img decoding="async" class="aligncenter size-full wp-image-7610" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/truepic-white400x160.png" alt="" width="250" /></a></p> <p><!-- Modal --></p> <div id="truepic" class="modal fade" tabindex="-1" data-backdrop="false" aria-labelledby="exampleModalLabel" aria-hidden="true"> <div class="modal-dialog modal-dialog-centered"> <div class="modal-content"> <div class="modal-body"> <p><span style="color: #000;"><br /> “We have long believed that secure media provenance is the best way to ensure transparency online and elevate authenticity in digital content. Joining the Partnership on AI’s Framework is a natural progression of our work on authenticity and disclosure in digital media. We believe the Framework will continue to guide best practices and policies for the ethical, transparent use of generative AI. Our mission continues to be a more authentic, transparent internet, and we’re excited to support the Framework with this mission in mind.”<br /> <strong>Jeff McGregor</strong><br /> <em>CEO,</em><br /> <em>Truepic</em><br /> </span></p> </div> </div> </div> </div> <p><!-- END TRUEPIC --></p> </div> <div class="col-lg-4 col-md-4 col-sm-12"> <p><!-- EMPTY CELL --></p> </div> </div> <h2 style="margin-bottom: 1em;"></h2> </div> </div> <div id="case_studies" class="card contract text-white" style="background-color: #1855cc"> <div class="card-header">Case Studies</div> <div class="card-body"> <h2 style="padding-bottom: 2em;">Case Studies</h2> <p>The best practices outlined in PAI’s Synthetic Media Framework will need to evolve with both the technology and information landscape. Thus, to understand how the principles can be applied to the real-world, we required all 18 of the Framework supporters to submit an in-depth case study exploring how they implemented the Framework in practice.</p> <p>In March 2024, ten Framework supporters delivered case studies, with PAI drafting one of our own. This set of case studies, and the <a href="https://partnershiponai.org/wp-content/uploads/2024/03/pai-synthetic-media-case-study-analysis-1.pdf" target="_blank" rel="noopener">accompanying analysis</a>, focused on transparency, consent, and harmful/responsible use cases.</p> <p>In November 2024, another five Framework supporters developed case studies, specifically focused on an underexplored area of synthetic media governance: direct disclosure — methods to convey to audiences how content has been modified or created with AI, like labels or other signals — and PAI developed policy recommendations based on insights from the cases.</p> <p>The cases not only provide greater transparency on institutional practices and decisions related to synthetic media, but also help the field refine policies and practices for responsible synthetic media, including emergent mitigations. 
Importantly, the cases may support AI policymaking overall, providing broader insight about how collaborative governance can be applied across institutions.</p> <p><!-- TABLE --></p> <div class="row"> <div class="col-sm-12 col-md-6 col-lg-6"> <div class="row"> <div class="col-12"> <h3>March 2024 Cases</h3> <p>Read <a href="https://partnershiponai.org/resource/from-principles-to-practices/" target="_blank" rel="noopener">PAI’s analysis</a> of these cases</p> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7732" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/adobe400px-1.png" alt="" width="35" height="60" /></div> <div class="col-sm-8 col-md-10">Adobe designed its Firefly generative AI model with transparency and disclosure<br /> <a href="https://partnershiponai.org/adobe-framework-case-study/" target="_blank" rel="noopener">Read Adobe’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7733" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/bbc400px-1.png" alt="" width="60" height="60" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/bbc400px-1.png 160w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/bbc400px-1-150x150.png 150w" sizes="(max-width: 60px) 100vw, 60px" /></div> <div class="col-sm-8 col-md-10">BBC used face swapping to anonymize interviewees<br /> <a href="https://partnershiponai.org/bbc-framework-case-study/" target="_blank" rel="noopener">Read BBC R&D’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7734" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/bumble400px-1.png" alt="" width="120" height="64" /></div> <div class="col-sm-8 col-md-10">Bumble is preventing malicious AI-generated dating profiles<br /> <a href="https://partnershiponai.org/bumble-framework-case-study/" target="_blank" rel="noopener">Read Bumble’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7840" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/cbc-news400px.png" alt="" width="120" height="60" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/cbc-news400px.png 321w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/03/cbc-news400px-300x150.png 300w" sizes="(max-width: 120px) 100vw, 120px" /></div> <div class="col-sm-8 col-md-10">CBC News decided against using AI to conceal a news source’s identity <span style="font-weight: 400;"><br /> <a href="https://partnershiponai.org/cbc-framework-case-study/" target="_blank" rel="noopener">Read CBC Radio-Canada’s case study</a></span></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img decoding="async" class="alignnone wp-image-7772" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/01/d-id400px_new.png" alt="" width="55" /></div> <div class="col-sm-8 col-md-10">AI video company D-ID received consent to digitally resurrect victims of 
domestic violence<br /> <a href="https://partnershiponai.org/d-id-framework-case-study/" target="_blank" rel="noopener">Read D-ID’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7743" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/openai400px-1.png" alt="" width="68" height="60" /></div> <div class="col-sm-8 col-md-10">OpenAI is building disclosure into every DALL-E image<br /> <a href="https://partnershiponai.org/openai-framework-case-study/" target="_blank" rel="noopener">Read OpenAI’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7744" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/respeecher400px-1.png" alt="" width="120" height="60" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/respeecher400px-1.png 318w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/respeecher400px-1-300x151.png 300w" sizes="(max-width: 120px) 100vw, 120px" /></div> <div class="col-sm-8 col-md-10">Respeecher enables creative uses of its voice-cloning technology while preventing misuse<br /> <a href="https://partnershiponai.org/respeecher-framework-case-study/" target="_blank" rel="noopener">Read Respeecher’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7745" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/synthesia400px-1.png" alt="" width="120" height="60" srcset="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/synthesia400px-1.png 322w, https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/synthesia400px-1-300x149.png 300w" sizes="(max-width: 120px) 100vw, 120px" /></div> <div class="col-sm-8 col-md-10">AI video startup Synthesia is scaling up content moderation to prevent misuse<br /> <a href="https://partnershiponai.org/synthesia-framework-case-study/" target="_blank" rel="noopener">Read Synthesia’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7747" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/tiktok400px-1.png" alt="" width="37" height="60" /></div> <div class="col-sm-8 col-md-10">TikTok launched new AI labeling policies to prevent misleading content and empower responsible creation<br /> <a href="https://partnershiponai.org/tiktok-framework-case-study/" target="_blank" rel="noopener">Read TikTok’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7749" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/witness400px-1.png" alt="" width="120" height="82" /></div> <div class="col-sm-8 col-md-10">Even the best-intentioned uses of generative AI still need transparency — an analysis by human rights organization WITNESS<br /> <a href="https://partnershiponai.org/witness-framework-case-study" target="_blank" rel="noopener">Read WITNESS’s case study</a></div> </div> <div class="row" 
style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-10544" src="https://partnershiponai.org/wp-content/uploads/2024/01/PAI-white-transparent-300x51-1.png" alt="" width="120" height="20" /></div> <div class="col-sm-8 col-md-10">The risk of synthetic media misuse is growing in global elections — an analysis by PAI<br /> <a href="https://partnershiponai.org/pai-framework-case-study" target="_blank" rel="noopener">Read PAI’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-12"><span style="font-size: .9em; font-style: italic;">See the <a href="https://partnershiponai.org/wp-content/uploads/2024/03/pai-synthetic-media-case-study-template.pdf" target="_blank" rel="noopener">blank template</a> these cases respond to</span></div> </div> </div> </div> <div class="col-sm-12 col-md-6 col-lg-6"> <div class="row"> <div class="col-12"> <h3>November 2024 Cases</h3> <p>Read <a href="https://partnershiponai.org/resource/policy-recommendations-from-5-cases-implementing-pais-synthetic-media-framework/" target="_blank" rel="noopener">PAI’s policy recommendations</a> from these cases</p> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img decoding="async" class="alignnone wp-image-7733" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/meta400px-1.png" alt="" width="120" /></div> <div class="col-sm-8 col-md-10">Meta updated its approach to direct disclosure based on user feedback<br /> <a href="https://partnershiponai.org/meta-framework-case-study/" target="_blank" rel="noopener">Read Meta’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7734" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/microsoft400px-1.png" alt="" width="120" height="64" /></div> <div class="col-sm-8 col-md-10">Microsoft and LinkedIn gave users detailed context about media on the professional networking platform<br /> <a href="https://partnershiponai.org/microsoft-framework-case-study/" target="_blank" rel="noopener">Read Microsoft’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="aligncenter size-full wp-image-12100" src="https://partnershiponai.org/wp-content/uploads/2024/01/HAI_researchers-white-400x160-1.png" alt="" width="400" height="160" /></div> <div class="col-sm-8 col-md-10">Direct disclosure has limited impact on AI-generated Child Sexual Abuse Material — an analysis by researchers at Stanford HAI<br /> <a href="https://partnershiponai.org/hai-researchers-framework-case-study/" target="_blank" rel="noopener">Read the HAI researchers’ case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7840" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2024/11/thorn_400px-1.png" alt="" width="120" height="60" /></div> <div class="col-sm-8 col-md-10">Mitigating the risk of generative AI models creating Child Sexual Abuse Materials – an analysis by child safety nonprofit Thorn<br /> <a href="https://partnershiponai.org/thorn-framework-case-study/" 
target="_blank" rel="noopener">Read Thorn’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-sm-4 col-md-2"><img loading="lazy" decoding="async" class="alignnone wp-image-7772" src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/truepic400px-1.png" alt="" width="68" height="60" /></div> <div class="col-sm-8 col-md-10">Truepic used disclosures to help authenticate cultural heritage imagery in conflict zones<br /> <a href="https://partnershiponai.org/truepic-framework-case-study/" target="_blank" rel="noopener">Read Truepic’s case study</a></div> </div> <div class="row" style="padding-bottom: 10px;"> <hr class="white" /> <div class="col-12"><span style="font-size: .9em; font-style: italic;">See the <a href="https://partnershiponai.org/wp-content/uploads/2024/11/case-study-template-2024.pdf" target="_blank" rel="noopener">blank template</a> the Meta and Microsoft cases respond to</span></div> <div class="col-12"></div> </div> </div> </div> <p><!-- END TABLE --></p> <p> </p> <p>The case submitting organizations are a seemingly eclectic group; and yet they’re all integral members of a synthetic media ecosystem that requires a blend of technical and humanistic might to benefit society.</p> <p><img decoding="async" class="aligncenter size-full wp-image-12072" src="https://partnershiponai.org/wp-content/uploads/2024/11/gen-ai-lifecycle-3.png" alt="" width="1200" /></p> <p>Some of those featured are <strong>Builders</strong> of technology for synthetic media, while others are <strong>Creators</strong>, or <strong>Distributors</strong>. Notably, while civil society organizations are not typically creating, distributing, or building synthetic media (<a href="https://www.theguardian.com/world/2023/may/02/amnesty-international-ai-generated-images-criticism" target="_blank" rel="noopener">though that’s possible</a>), they are included in the case process; they are key actors in the ecosystem surrounding digital media and online information who must have a central role in AI governance development and implementation.</p> </div> </div> </div> <div class="accordion-logos"> <div class="logo-item"> <a href="https://twitter.com/intent/tweet?url=https://syntheticmedia.partnershiponai.org/&text=PAI's+Responsible+Practices+for+Synthetic+Media" target="_blank"> <img src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/09/X-icon-square.png" alt="" /> </a> </div> <div class="logo-item"> <a href="https://www.facebook.com/sharer/sharer.php?u=https://syntheticmedia.partnershiponai.org" target="_blank"> <img src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/02/facebook-icon-square.png" alt="" /> </a> </div> <div class="logo-item"> <a href="https://www.linkedin.com/shareArticle?mini=true&url=https://syntheticmedia.partnershiponai.org&title=Responsible+Practices+for+Synthetic+Media" target="_blank"> <img src="https://syntheticmedia.partnershiponai.org/wp-content/uploads/2023/02/linkedin-icon-square.png" alt="" /> </a> </div> </div> </div> </div> <footer class="content-info"> <div class="container"> </div> </footer> <link rel='stylesheet' id='ytpp-css' href='https://syntheticmedia.partnershiponai.org/wp-content/plugins/youtube-playlist-player/css/style.min.css?ver=4.6.9' type='text/css' media='all' /> <script type="text/javascript" src="https://syntheticmedia.partnershiponai.org/wp-content/themes/synthmediatheme/dist/scripts/main_42efb519.js" id="sage/main.js-js"></script> <script 
type="text/javascript" src="https://syntheticmedia.partnershiponai.org/wp-content/plugins/youtube-playlist-player/js/ytpp-main.min.js?ver=4.6.9" id="ytpp-js"></script> <script type="text/javascript" src="https://syntheticmedia.partnershiponai.org/wp-content/plugins/youtube-playlist-player/js/ytpp-fluid-vids.min.js?ver=4.6.9" id="ytpp-fluid-vids-js"></script> </body> </html>