code.flickr.com
# Safer Internet Day and Open Source Codes of Conduct

Posted on February 14, 2022 by Sarah Graywood
class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/sgraywood/" title="View all posts by Sarah Graywood" rel="author">Sarah Graywood</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p><span style="font-weight: 400;"><img decoding="async" fetchpriority="high" class="alignnone size-medium wp-image-3640" src="https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?w=800" alt="" width="800" height="450" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png 1600w, https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?resize=150,84 150w, https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?resize=800,450 800w, https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?resize=768,432 768w, https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?resize=1024,576 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?resize=1536,864 1536w, https://code.flickr.net/wp-content/uploads/sites/3/2022/02/7-Standard-SID-logo.png?resize=500,281 500w" sizes="(max-width: 800px) 100vw, 800px" /></span></p> <p><span style="font-weight: 400;">Last week the world celebrated </span><a href="https://www.saferinternetday.org/en-US/"><span style="font-weight: 400;">Safer Internet Day</span></a><span style="font-weight: 400;">, a day used to call upon stakeholders to join together to make the internet a safer and better place for all, and especially for children and young people. Here at Flickr, we believe in creating spaces on the internet that take into account the safety of all of our contributors, especially our youngest and most underrepresented. So, to celebrate that and to continue the work of making our spaces safer and more accessible to all, we have added a code of conduct to our most trafficked open source repositories on GitHub.</span></p> <h1><b>What’s/Why Open Source?</b></h1> <div style="width: 810px" class="wp-caption alignnone"><a title="100_0509" href="https://www.flickr.com/photos/qrush/2960047774" data-flickr-embed="true"><img decoding="async" src="https://live.staticflickr.com/3050/2960047774_6e39a980dc_c.jpg" alt="100_0509" width="800" height="600" /></a><p class="wp-caption-text">“100_0509” by Nick Quaranto is licensed under CC BY-SA 2.0</p></div> <p><script async src="//embedr.flickr.com/assets/client-code.js" charset="utf-8"></script><br /> <span style="font-weight: 400;">Open source is a method of development that allows software to be available publicly so that contributors can modify, add, and remove code as they see fit in order to create a more robust codebase colored with the ideas and innovations of many developers rather than just a few. At Flickr we believe that innovation happens when we have a diverse and widespread set of voices coming together to suggest changes. Open source allows us to harness the power of these voices to create the very best software we can. </span></p> <p><span style="font-weight: 400;">Flickr has 15 open source repositories, 4 of which are actively contributed to. 
Flickr has 15 open source repositories, 4 of which are actively contributed to. Of those four, none had a formal code of conduct to govern contributions to the code base or interpersonal interactions between developers actively working on the code… until now!

## Why a code of conduct?

Codes of conduct are extremely common and important in the open source community. Groups like Linux, Homebrew, Bootstrap, and Kubernetes all have codes of conduct governing the use of and contributions to their open source projects. Because open source allows such a diverse set of voices to express themselves, conflicts can arise, and unfortunately not everyone comes with the best of intent.

[Photo: "Bullying" by Senado Federal, licensed under CC BY 2.0]

Codes of conduct give us a shared, up-front understanding of what interactions in our community are meant to look like and why we hold these expectations of members. They can range from expectations for interpersonal interactions (e.g., demonstrate kindness and empathy toward other developers in pull request reviews) to more generalized expectations (e.g., focus on what is best for the community as a whole rather than individual desires or needs). Codes of conduct not only benefit the community in its entirety, but also allow us to focus on protecting the psychological safety of the members of our community who are most at risk. We care about all of our members while also recognizing the need for specific and directed language to protect members of underrepresented groups. The best way to do this is to have a written code of conduct with specific, actionable steps used to govern the safety of the community.

## Why Contributor Covenant?

In order to protect underrepresented groups and to foster a strong and healthy open source community here at Flickr, we considered whether it would be best to write our own code of conduct specifically tailored to what we value at Flickr, or whether it would be better to find a code of conduct already in use that could guide our own open source communities.
We ended up finding a code of conduct already in use by quite a few well-respected organizations that spoke directly to [our most important operating principles](https://code.flickr.net/2021/11/22/flickr-engineering-team-vision-guiding-principles/).

[Contributor Covenant](https://www.contributor-covenant.org/) is a code of conduct for participating in open source communities which explicitly outlines expectations in order to create a healthy open source culture. Contributor Covenant has been adopted by over a hundred thousand open source communities and projects since 2014, and is used by Linux, Babel, Bootstrap, Kubernetes, code.gov, Twilio, Homebrew-Cask, and Target, to name a few. With such well-respected organizations turning to Contributor Covenant, we would have been foolish not to consider it.

As we considered it, we realized that Contributor Covenant captured all of our values in [a wonderful document only a little over a page long](https://www.contributor-covenant.org/version/2/1/code_of_conduct/). Accessible in its readability and brevity, yet robust enough to protect underrepresented contributors on our open source repositories, it was the marriage of everything we wanted in a code of conduct, and adopting it makes us part of a large-scale community sharing a single vision for a healthy, safe, and innovative open source community.

# A Pluggable Solution for API Observability on our PHP System

Posted on January 4, 2022 by nickscheiblauer

When people think about tech and innovation, they often talk about the "next generation."

Just use GraphQL and life will be easier, many will tell you.

The future of cloud-native is *Lambda*, claim others.
400">Unfortunately, most of the conversations don’t talk about the question that is most top-of-mind for me: what does the next generation of tools look like </span><i><span style="font-weight: 400">for legacy systems</span></i><span style="font-weight: 400">?</span></p> <p><span style="font-weight: 400">As a Senior Engineering Manager for Flickr’s backend team, here’s one of the major issues my team faces: we have a ton of code that engineers need to understand in order to safely and quickly ship changes. Flickr has built a product loved by millions of photographers for nearly two decades and we have some real history in our code base. You can imagine the amount of work it takes to maintain the stability of our large, complex, public-facing API—which impacts not just customers who use our API, but our own web, mobile, and desktop clients.</span></p> <p><span style="font-weight: 400">The difficulty of wrangling a legacy code base is what led us to be interested in </span><a href="https://www.akitasoftware.com/"><span style="font-weight: 400">Akita</span></a><span style="font-weight: 400">, an observability company going after the dream of “one-click” observability. Akita’s first product passively watches API traffic using packet capture (PCAP) to provide automated API monitoring, automatically infer the structure of API endpoints, and automatically detect potential issues and breaking changes. Akita’s goal is to make it possible for organizations like ours, with our hundreds of thousands of lines of legacy code, to understand system behavior in order to move quickly.</span></p> <p><span style="font-weight: 400">But there’s a catch: Akita’s first product, currently in beta, works only for representational state transfer (REST) APIs. Our API at Flickr, nearly twenty years old, coincides with the rise of REST. This blog post focuses on how I used Akita to introduce observability to our code base.</span></p> <h2><span style="font-weight: 400">Moving fast with legacy systems</span></h2> <p><span style="font-weight: 400">First, let me give some context on high-level responsibilities of backend engineering at Flickr. Since moving Flickr into the cloud two years ago we’ve had more time to focus on modernizing our services and improving our developer experience. This puts us in a much better position to build new features than before—but first, we need to streamline how we get things done, which is not nearly as simple as it sounds.</span></p> <p><span style="font-weight: 400">Today, we serve up around a billion photos daily from millions of photographers. Nearly every Flickr API request executes legacy code in some way—code that is less tested, less documented, and sometimes dangerous to mess with. A great deal of care has to be taken to avoid disruptions. And when new features need to interact with older features, this can get complex fast! On top of all that, we need to find ways to help our small but mighty team focus their limited time and attention while navigating the old and the new, without the luxury of handing this problem over to an internal tools team.</span></p> <p><span style="font-weight: 400">Our difficulty getting a handle on our legacy systems led us to become excited about using Akita for easy observability. Akita promised to tell us about our API interactions and potential issues with the API, all by passively watching API traffic. But there was, as I mentioned, a catch: Akita works only for REST APIs right now, and our API is… RESTish. 
Most notably, we never adopted the REST convention of using distinct URL paths for each service endpoint, and we rely heavily on passing parameters through the query string, or form-encoded in POSTs. This situation has historically made it hard for us to use other API tools as well.

## Getting Akita to work for my REST-like format

Thankfully, our PHP request handlers are plug and play, so I quickly whipped up a new proof-of-concept handler showing that we *could* start getting visibility into our API endpoints and their behavior using Akita. This gave me the ability to generate Akita traces using curl and the Akita command line interface (CLI) tool out of the box, but only within my local dev environment.

[Image: Screenshot of the Akita web console showing the detected API specification of a single Flickr API call]

Right away I spotted some things to improve, and more ideas came that afternoon. I wanted to put our `api_key` parameter into an Authorization header, and remove the `method` parameter, since I'd used it in a fake service path. Also, our API returns a 200 HTTP status on errors, including an element `stat` indicating failure; I wanted those to be HTTP 400s.

But I had a conundrum: Akita works best when observing production traffic. Real, production API requests at production load will really fill in the nooks and crannies of our API models. My progress showed it would be *so worth it* to go further, so I met with the Akita team and discussed using their Go-based plugin system to transform our live requests into a desirable format based on my proof-of-concept. It turns out that most of Akita's tooling is [open source](https://github.com/akitasoftware/akita-cli) and I could work on the plugin myself! This turned out to be the key to making Akita work with our RESTish format.
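To make the rewriting concrete, here is a minimal Go sketch of the kind of transformation described above: promote the `method` parameter to a URL path, move `api_key` into an Authorization header, and map the API's "200 with `stat=fail`" convention onto an HTTP 400. This is illustrative only; the real plugin operates on the Akita CLI's own parsed-traffic types rather than `*http.Request`, and the function names, sample endpoint, and key value here are invented.

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
)

// restifyRequest rewrites a Flickr-style call such as
//   GET /services/rest/?method=flickr.test.echo&api_key=abc123&name=value
// into a more REST-looking request:
//   GET /flickr.test.echo?name=value   with   Authorization: abc123
func restifyRequest(req *http.Request) {
	q := req.URL.Query()

	// Use the `method` parameter as the service path.
	if m := q.Get("method"); m != "" {
		req.URL.Path = "/" + m
		q.Del("method")
	}

	// Move the API key out of the query string and into a header.
	if key := q.Get("api_key"); key != "" {
		req.Header.Set("Authorization", key)
		q.Del("api_key")
	}

	req.URL.RawQuery = q.Encode()
}

// restifyStatus maps the "HTTP 200 with stat=fail" convention onto an
// error status, so failures show up as failures in the observed spec.
func restifyStatus(httpStatus int, stat string) int {
	if httpStatus == 200 && stat == "fail" {
		return 400
	}
	return httpStatus
}

func main() {
	u, _ := url.Parse("https://api.flickr.com/services/rest/?method=flickr.test.echo&api_key=abc123&name=value")
	req := &http.Request{Method: "GET", URL: u, Header: http.Header{}}

	restifyRequest(req)
	fmt.Println(req.URL.String())                // https://api.flickr.com/flickr.test.echo?name=value
	fmt.Println(req.Header.Get("Authorization")) // abc123
	fmt.Println(restifyStatus(200, "fail"))      // 400
}
```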
## Fitting into the Go plugin format

Exciting news! I just needed to turn my prototype into something that I could run with the Akita agent every time.

The Akita CLI has a mechanism for dynamically loading plugins, which can operate on the captured and parsed data before it is sent to the Akita cloud. My transformations of the API format into a more REST-like format could be packaged that way.

I soon discovered that I was the first person to try building a third-party plugin. Akita told me that they used the plugin architecture internally to package a non-open-source plugin that infers data formats, but that plugin is compiled into the client.

My early attempts at working with the released CLI version resulted in nothing but discouraging error messages like:

`fatal error: runtime: no plugin module data`

I worked around this by compiling the open-source version of the Akita CLI myself and pointing the plugin build at the exact same version of the source code. An engineer at Akita reported the same problem and concluded that the plugin needed to be built at the same time as the program that will use it. Go's idiosyncratic linking conventions seem to make it virtually impossible for such an external plugin to satisfy its dependencies against multiple versions of the base binary. Later, we learned the following from Russ Cox, confirming that our decision to abandon the external plugin approach was wise:

[Tweet from Russ Cox (https://twitter.com/_rsc/status/1459257455360229387), replying to a question about the Go team's direction for Go plugins: "Kind of rudderless right now. Higher priority things are taking all our cycles, so mostly benign neglect for plugins. Sorry."]
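As background on why the version matching is so strict, here is a minimal sketch of the standard-library `plugin` workflow that dynamic loading relies on. This is generic Go, not Akita's plugin interface, and the `Transform` symbol is a made-up stand-in: a shared object built with `go build -buildmode=plugin` loads only if it was compiled with the same Go toolchain and dependency versions as the host binary, which is essentially the wall we hit.

```go
// transform.go: built separately with
//   go build -buildmode=plugin -o transform.so transform.go
package main

// Transform is the exported symbol the host binary looks up at runtime.
func Transform(method string) string {
	return "/" + method // stand-in for the real request rewriting
}

func main() {} // required for package main; never called when loaded as a plugin
```

```go
// host.go: the program that loads the plugin dynamically.
package main

import (
	"fmt"
	"log"
	"plugin"
)

func main() {
	// Fails unless transform.so was built against the very same toolchain
	// and module versions as this binary.
	p, err := plugin.Open("transform.so")
	if err != nil {
		log.Fatal(err)
	}
	sym, err := p.Lookup("Transform")
	if err != nil {
		log.Fatal(err)
	}
	transform := sym.(func(string) string)
	fmt.Println(transform("flickr.test.echo")) // "/flickr.test.echo"
}
```

Compiling the plugin into the CLI itself, as described next, sidesteps this constraint entirely.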
To make this process repeatable, we adopted a hybrid approach: I added the Flickr-specific transformations of the API as a plugin in a newly created Akita open source repository ([you can check out the code here](https://github.com/akitasoftware/plugin-flickr)!). Akita will compile that plugin into all their future CLI builds, so there is no problem with dynamic loading, and I can enable the plugin for my traces with a command-line flag and use the most recent version of the CLI without recompiling my plugin to match. This is the same way Akita incorporates modules for type inference. Other users can incorporate contributions in a similar way.

## Using Akita to move faster

Now that we have the plugin written, we're moving toward integration with our production environment. Here's an example of what we're able to understand with Akita. Note that the *person.new* response element has been detected as both datetime and string data types. We should fix that!

[Image: Screenshot of the Akita web console with two entries highlighted, showing mixed data types detected for the same field]

Here's what we're integrating Akita to do:

- **Taking snapshots of our API endpoints.** Having a large API footprint makes it all the more important for us to generate de facto specifications and curate the result, rather than try to hand-write specifications from scratch. Once we have a solid OpenAPI 3 specification, we can make tactical changes to ensure the API adheres to the spec without doing a full-on rewrite of the backend.
- **Identifying changes to our API endpoints.** The ability to detect unexpected or off-spec responses will make it a lot easier for us to code from the client side, particularly the Android and iOS mobile apps. We expect to reduce defensive exception handling on the client side, making our mobile code easier to work with and less of a resource hog.
- **Tracking our inter-service communication as we modernize our infrastructure.** Observing the interaction between services is increasingly important as we use more and more microservices and refine our service-oriented architecture. For example, having a high-level view of impacted services during a production incident will expedite service recovery and get our users back to doing what they love.
While we currently have metrics, monitoring, and logging in place with AWS CloudWatch and Splunk, Akita is able to provide us the information we need in a structured, per-endpoint way, making it easier for our developers to understand what's going on and focus their attention on what matters. Stay tuned for updates!

## Thoughts on tools for legacy systems in general

I see our partnership with Akita as a key part of the beginning of our effort to innovate how to move fast with a legacy system. This problem is not unique to us: Facebook has built multiple type systems for multiple different dynamically typed languages to deal with it! But the fact that we can't spin off dedicated teams to write compilers for PHP places its own set of constraints on us. And there are many companies in a similar boat: small or medium-sized engineering teams of passionate, driven, smart people working on products they love and want you, their customer, to love, too.

I love working on these sorts of problems because they are among the hardest to solve. It takes a lot more than finding a new database or coming up with a faster algorithm; working with large legacy codebases presents challenges that seem intractable. In my experience, you need the right balance of organization, process, tooling, and grit.

Successful companies eventually reach the point where addressing these things is critical and necessary, or delivering value slows to a crawl. I've found Flickr to be a unique combination of legacy systems, wonderful engineering heritage, and forward-looking, motivated people. If you work somewhere that would benefit from improved production and development observability, you should [check out what Akita is up to](https://www.akitasoftware.com/).
And if you're interested in working with us here, check out the [Flickr jobs page](https://flickr.com/jobs)!

*Many thanks to Jean Yang, Mark Gritter, and the Akita team for their assistance with this post and our integration with their marvelous new product!*

# Flickr Engineering Team Vision & Guiding Principles

Posted on November 22, 2021 by Alex Seville

There's a rich history of engineering innovation and excellence at Flickr. The team has been involved in the development of specs and open standards, been an early adopter of technologies like NodeJS, and successfully migrated from Yahoo data centers to AWS in less than a year!

Through all the years, there has been a sense of vision and principles on the team, but nothing formally documented. We were inspired by [Artsy](https://github.com/artsy/README/blob/master/culture/engineering-principles.md) and [Amazon](https://www.amazon.jobs/en/landing_pages/pe-community-tenets) to create a team vision and guiding principles, and share those with the team, job candidates, and the public.

We hope that this document evolves with the team, and look forward to discussing it with future coworkers!

## Flickr Engineering Team Vision

*Flickr Engineering exists to design, build, and maintain software that enables the global community of photography enthusiasts to find inspiration, connect, and share. We succeed by building a culture of innovation, being generous with providing and soliciting feedback, embracing and sharing our strengths, and delivering consistently, reliably, and predictably.*

## Flickr Engineering Guiding Principles

### 1. Psychological Safety

You and your coworkers are the most important element of the engineering organization.
To learn, grow, and be productive as an engineer, you must feel safe at work. Everyone at Flickr Engineering, especially those in leadership positions, is responsible for fostering a psychologically safe work environment.

Ways to do that include:

- Admitting and discussing mistakes
- Framing work as a learning experience
- Ensuring communication and teamwork are inclusive and respectful
- Growing a team made up of individuals from diverse backgrounds
- Engaging in continuous feedback and praise for coworkers
- Modeling open and respectful communication
- Sharing knowledge and opportunities to help each other level up

Further Reading:

- [What Does Compassion have to do With Coding](https://compassionatecoding.com/blog/2016/8/15/what-does-compassion-have-to-do-with-coding)
- [What it Feels Like to Work in a Supportive Environment for Female Engineers](https://medium.com/artsy-blog/what-it-feels-like-to-work-in-a-supportive-environment-for-female-engineers-3c994a001007)
- [Building Better Software by Building Better Teams](https://ashfurrow.com/blog/building-better-software-by-building-better-teams/)
- [High-Performing Teams Need Psychological Safety. Here's How to Create It](https://hbr.org/2017/08/high-performing-teams-need-psychological-safety-heres-how-to-create-it)
- [Psychological Safety, Risk Tolerance and High Functioning Software Teams](https://hackernoon.com/psychological-safety-risk-tolerance-and-high-functioning-software-teams-75701ed23f68)

### 2. Incremental Revolution

Introduce new technologies slowly and incrementally. Avoid rewrites. Build tools to allow hybrids of different types of technology when possible. Sometimes you need to make a big leap, but aim to approach it incrementally.

Explore bleeding-edge technologies on projects with an end date that can safely be classed "done." These can be used to inform decisions on long-running projects.
Run spike projects when trying to decide between technology trade-offs.</span></p> <p><span style="font-weight: 400;">Examples include:</span><span style="font-weight: 400;"><br /> </span></p> <ul> <li style="font-weight: 400;" aria-level="1"><span style="font-weight: 400;">Developing or adapting code to macro-services </span></li> <li style="font-weight: 400;" aria-level="1"><span style="font-weight: 400;">Avoiding the creation of more stacks to support by underestimating the scale of the work involved</span></li> </ul> <p> </p> <p> </p> <h3><b>3. Own Your Dependencies</b><b></b></h3> <p><span style="font-weight: 400;">Take the dependencies which fit your problem and make them better. If there’s no perfect match, take a 90% fit and contribute back to get it to 100%.</span></p> <p><span style="font-weight: 400;">We use dependencies to avoid re-inventing the wheel, but that doesn’t mean our responsibility stops at installing them. Security patches, updates, and roadmap changes all need to be tracked and understood.</span></p> <p><span style="font-weight: 400;">Our goal is to feel that we can influence the design and execution of all the components in our apps. Aim to be a trusted contributor to the communities surrounding your work, communicate clearly and publicly, and be empathetic to the priorities of others.</span></p> <p><span style="font-weight: 400;">Examples:</span></p> <ul> <li style="font-weight: 400;" aria-level="1"><span style="font-weight: 400;">Node modules for NodeJS projects</span></li> </ul> <p> </p> <p> </p> <h3><b>4. Done Means Done</b><b></b></h3> <p><span style="font-weight: 400;">Being responsible for your code extends beyond the delivery date. Done being done means you’ve protected your changes with tests, ensured deployment works, and are confident in your tools for measuring.</span></p> <p><span style="font-weight: 400;">When something is done, it doesn’t mean that you’ll never need to go back to it, but that going back to it is a new project. It’s done.</span></p> <p> </p> <p> </p> <h3><b>5. Build for 10x</b><b></b></h3> <p><span style="font-weight: 400;">Technology choices should be as close to optimal as we can make them without over-engineering. When designing systems or evaluating scalability and performance, we aim for today’s decisions to withstand 10x the traffic, data, or scale. Flickr is big and we can’t always anticipate the way a feature of a system will be used, especially as things evolve, but scale has always increased. This realistic horizon helps us balance the need to move quickly with the sometimes-competing need to invest in infrastructure and architecture. It also recognizes that solutions are expected to evolve and be replaced.</span></p> <ul> <li style="font-weight: 400;" aria-level="1"><span style="font-weight: 400;">Further Reading</span> <ul> <li style="font-weight: 400;" aria-level="2"><a href="https://lethain.com/migrations/"><span style="font-weight: 400;">Migrations: the sole scalable fix to tech debt</span></a></li> <li style="font-weight: 400;" aria-level="2"><a href="https://lethain.com/productivity-in-the-age-of-hypergrowth/"><span style="font-weight: 400;">Productivity in the age of hypergrowth.</span></a></li> </ul> </li> </ul> <p> </p> <p> </p> <h3><b>6. Appreciate What Came Before</b><b></b></h3> <p><span style="font-weight: 400;">We respect our predecessors and the decisions they made. 
We can’t always know the context, constraints, or reasons for a decision, so we’ll give them the benefit of the doubt.</span></p> <p><span style="font-weight: 400;">We appreciate the value of working systems and the lessons they embody. We understand that many problems are not essentially new.</span></p> <p><span style="font-weight: 400;">We learn together from mistakes, and appreciate it as an experience that helps us grow.</span></p> <p> </p> <p> </p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/uncategorized/" rel="category tag">Uncategorized</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3599 --> <article id="post-3590" class="post-3590 post type-post status-publish format-standard hentry category-uncategorized"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2018/04/20/together/" rel="bookmark">Together</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2018/04/20/together/" title="3:15 pm" rel="bookmark"><time class="entry-date" datetime="2018-04-20T15:15:17-07:00">April 20, 2018</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/almonroth/" title="View all posts by Matthew Roth" rel="author">Matthew Roth</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p>Flickr is excited to be <a href="https://www.smugmug.com/together">joining SmugMug</a>!</p> <p>We’re looking forward to some interesting and challenging engineering projects in the next year, and would love to have more great people join the team!</p> <p>We want to talk to people who are interested in working on an inclusive, diverse team, building large-scale systems that are backing a much-loved product.</p> <p>You can learn more about open positions at: <a href="http://jobs.smugmug.com/">http://jobs.smugmug.com/</a></p> <p>Read our <a href="https://blog.flickr.net/en/2018/04/20/together-smugmug-flickr/">announcement blog post</a> and <a href="https://blog.flickr.net/en/2018/04/20/together-smugmug-flickr-faq/">our extended Q&A</a> for more details.</p> <p>~The Flickr Team</p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/uncategorized/" rel="category tag">Uncategorized</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3590 --> <article id="post-3571" class="post-3571 post type-post status-publish format-standard hentry category-uncategorized tag-machine-learning tag-machine-tags tag-neural-network tag-similarity-search tag-visual-similarity"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2017/03/07/introducing-similarity-search-at-flickr/" rel="bookmark">Introducing Similarity Search at Flickr</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2017/03/07/introducing-similarity-search-at-flickr/" title="6:04 pm" rel="bookmark"><time class="entry-date" datetime="2017-03-07T18:04:36-08:00">March 7, 2017</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/claytonhoo/" 
title="View all posts by Clayton Mellina" rel="author">Clayton Mellina</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p><span style="font-weight:400;">At Flickr, we understand that the value in our image corpus is only unlocked when our members can find photos and photographers that inspire them, so we strive to enable the discovery and appreciation of new photos.</span></p> <p><span style="font-weight:400;">To further that effort, today we are introducing </span><b>similarity search</b><span style="font-weight:400;"> on Flickr. If you hover over a photo on a search result page, you will reveal a “…” button that exposes a menu that gives you the option to search for photos similar to the photo you are currently viewing.</span></p> <p><span style="font-weight:400;">In many ways, photo search is very different from traditional web or text search. First, the goal of web search is usually to satisfy a particular information need, while with photo search the goal is often one of </span><i><span style="font-weight:400;">discovery</span></i><span style="font-weight:400;">; as such, it should be delightful as well as functional. We have taken this to heart throughout Flickr. For instance, our color search feature, which allows filtering by color scheme, and our style filters, which allow filtering by styles such as “minimalist” or “patterns,” encourage exploration. Second, in traditional web search, the goal is usually to match documents to a set of keywords in the query. That is, the query is in the same modality—text—as the documents being searched. Photo search usually matches </span><i><span style="font-weight:400;">across</span></i><span style="font-weight:400;"> modalities: text to image. Text querying is a necessary feature of a photo search engine, but, as the saying goes, a picture is worth a thousand words. And beyond saving people the effort of so much typing, many visual concepts genuinely defy accurate description. Now, we’re giving our community a way to easily explore those visual concepts with the “…” button, a feature we call the </span><b>similarity pivot</b><span style="font-weight:400;">.</span></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3572" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/demo.gif?w=800" alt="" width="800" height="412" /></p> <p><span style="font-weight:400;">The similarity pivot is a significant addition to the Flickr experience because it offers our community an entirely new way to explore and discover the billions of incredible photos and millions of incredible photographers on Flickr. 
It allows people to look for </span><a href="https://www.flickr.com/search/?similarity_id=29327172003"><span style="font-weight:400;">images of a particular style</span></a><span style="font-weight:400;">, it gives people a view into </span><a href="https://www.flickr.com/search/?similarity_id=5742058855"><span style="font-weight:400;">universal behaviors</span></a><span style="font-weight:400;">, and even when it “messes up,” it can force people to look at the </span><a href="https://www.flickr.com/search/?similarity_id=14198128453"><span style="font-weight:400;">unexpected</span></a> <a href="https://www.flickr.com/search/?similarity_id=28863966765"><span style="font-weight:400;">commonalities</span></a><span style="font-weight:400;"> and </span><a href="https://www.flickr.com/search/?similarity_id=8002923505"><span style="font-weight:400;">oddities</span></a><span style="font-weight:400;"> of our visual world with a </span><a href="https://www.flickr.com/search/?similarity_id=13759436465"><span style="font-weight:400;">fresh</span></a> <a href="https://www.flickr.com/search/?similarity_id=15346205045"><span style="font-weight:400;">perspective</span></a><span style="font-weight:400;">.</span></p> <h2><span style="font-weight:400;">What is “similarity”?</span></h2> <p><span style="font-weight:400;">To understand how an experience like this is powered, we first need to understand what we mean by “similarity.” There are many ways photos can be similar to one another. Consider some examples.</span></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3573" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png?w=800" alt="" width="800" height="343" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png 1033w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png?resize=150,64 150w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png?resize=800,343 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png?resize=768,329 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png?resize=1024,439 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/color_sim.png?resize=500,214 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3576" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png?w=800" alt="" width="800" height="343" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png 1030w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png?resize=150,64 150w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png?resize=800,343 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png?resize=768,330 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png?resize=1024,439 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/texture_sim.png?resize=500,215 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3575" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png?w=800" alt="" width="800" height="344" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png 1029w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png?resize=150,65 150w, 
https://code.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png?resize=800,344 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png?resize=768,331 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png?resize=1024,441 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/semantic_sim.png?resize=500,215 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><span style="font-weight:400;">It is apparent that all of these groups of photos illustrate some notion of “similarity,” but each is different. Roughly, they are: similarity of color, similarity of texture, and similarity of semantic category. And there are many others that you might imagine as well.</span></p> <p>What notion of similarity is best suited for a site like Flickr? Ideally, we’d like to be able to capture multiple types of similarity, but we decided early on that semantic similarity—similarity based on the semantic content of the photos—was vital to facilitate discovery on Flickr. This requires a deep understanding of image content for which we employ deep neural networks.</p> <p>We have been using deep neural networks at Flickr for a while for various tasks such as object recognition, NSFW prediction, and even prediction of aesthetic quality. For these tasks, we train a neural network to map the raw pixels of a photo into a set of relevant tags, as illustrated below.</p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3578" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?w=800" alt="" width="800" height="230" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png 1408w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?resize=150,43 150w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?resize=800,230 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?resize=768,220 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?resize=1024,294 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?resize=1000,288 1000w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_tag.png?resize=500,143 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><span style="font-weight:400;">Internally, the neural network accomplishes this mapping incrementally by applying a series of transformations to the image, which can be thought of as a vector of numbers corresponding to the pixel intensities. Each transformation in the series produces another vector, which is in turn the input to the next transformation, until finally we have a vector that we specifically constrain to be a list of probabilities for each class we are trying to recognize in the image. To be able to go from raw pixels to a semantic label like “hot air balloon,” the network discards lots of information about the image, including information about appearance, such as the color of the balloon, its relative position in the sky, etc. 
Instead, we can extract an internal vector in the network before the final output.</span></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3579" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png?w=800" alt="" width="800" height="458" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png 1244w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png?resize=150,86 150w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png?resize=800,458 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png?resize=768,440 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png?resize=1024,586 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/nn_feature.png?resize=500,286 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><span style="font-weight:400;">For common neural network architectures, this vector—which we call a “feature vector”—has many hundreds or thousands of dimensions. We can’t necessarily say with certainty that any one of these dimensions means something in particular as we could at the final network output, whose dimensions correspond to tag probabilities. But these vectors have an important property: when you compute the </span><a href="https://en.wikipedia.org/wiki/Euclidean_distance"><span style="font-weight:400;">Euclidean distance</span></a><span style="font-weight:400;"> between these vectors, images containing similar content will tend to have feature vectors closer together than images containing dissimilar content. You can think of this as a way that the network has learned to organize information present in the image so that it can output the required class prediction. This is exactly what we are looking for: Euclidean distance in this high-dimensional feature space is a measure of semantic similarity. The graphic below illustrates this idea: points in the neighborhood around the query image are semantically similar to the query image, whereas points in neighborhoods further away are not.</span></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3580" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?w=800" alt="" width="800" height="366" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png 1686w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?resize=150,69 150w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?resize=800,366 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?resize=768,352 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?resize=1024,469 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?resize=1536,703 1536w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/retrieval.png?resize=500,229 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><span style="font-weight:400;">This measure of similarity is not perfect and cannot capture all possible notions of similarity—it will be constrained by the particular task the network was trained to perform, i.e., scene recognition. However, it is effective for our purposes, and, importantly, it contains information beyond merely the semantic content of the image, such as appearance, composition, and texture. 
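</span></p> <p><span style="font-weight:400;">To make this concrete, here is a minimal NumPy sketch of the idea (not our production code): it assumes feature vectors have already been extracted from some internal layer of a trained network, and it ranks a toy index by Euclidean distance to a query. The array shapes and names are illustrative only.</span></p> <pre><code>import numpy as np

def euclidean_rank(query_vec, index_vecs, top_k=10):
    """Rank indexed feature vectors by Euclidean distance to a query vector.

    query_vec:  (d,) feature vector for the query image
    index_vecs: (n, d) feature vectors for the indexed images
    Returns the positions of the top_k closest images, nearest first.
    """
    diffs = index_vecs - query_vec
    # Squared Euclidean distance is monotonic in the true distance,
    # so it ranks identically and avoids a square root per item.
    dists = np.einsum("ij,ij->i", diffs, diffs)
    return np.argsort(dists)[:top_k]

# Hypothetical toy index: 10,000 images with 1,024-dimensional features.
rng = np.random.default_rng(0)
index_features = rng.normal(size=(10_000, 1024)).astype(np.float32)
query_features = index_features[42] + 0.01 * rng.normal(size=1024).astype(np.float32)
print(euclidean_rank(query_features, index_features, top_k=5))
</code></pre> <p><span style="font-weight:400;">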
Most importantly, it gives us a simple algorithm for finding visually similar photos: compute the distance in the feature space of a query image to each index image and return the images with lowest distance. Of course, there is much more work to do to make this idea work for billions of images.</span></p> <h2><span style="font-weight:400;">Large-scale approximate nearest neighbor search</span></h2> <p><span style="font-weight:400;">With an index as large as Flickr’s, computing distances exhaustively for each query is intractable. Additionally, storing a high-dimensional floating point feature vector for each of billions of images takes a large amount of disk space and poses even more difficulty if these features need to be in memory for fast ranking. To solve these two issues, we adopt a state-of-the-art approximate nearest neighbor algorithm called </span><a href="http://image.ntua.gr/iva/files/lopq.pdf"><span style="font-weight:400;">Locally Optimized Product Quantization</span></a><span style="font-weight:400;"> (LOPQ).</span></p> <p><span style="font-weight:400;">To understand LOPQ, it is useful to first look at a simple strategy. Rather than ranking all vectors in the index, we can first filter a set of good candidates and only do expensive distance computations on them. For example, we can use an algorithm like </span><a href="https://en.wikipedia.org/wiki/K-means_clustering"><i><span style="font-weight:400;">k</span></i><span style="font-weight:400;">-means</span></a><span style="font-weight:400;"> to cluster our index vectors, find the cluster to which each vector is assigned, and index the corresponding cluster id for each vector. At query time, we find the cluster that the query vector is assigned to and fetch the items that belong to the same cluster from the index. We can even expand this set if we like by fetching items from the next nearest cluster.</span></p> <p><span style="font-weight:400;">This idea will take us far, but not far enough for a billions-scale index. For example, with 1 billion photos, we need 1 million clusters so that each cluster contains an average of 1000 photos. At query time, we will have to compute the distance from the query to each of these 1 million cluster centroids in order to find the nearest clusters. This is quite a lot. We can do better, however, if we instead split our vectors in half by dimension and cluster each half separately. In this scheme, each vector will be assigned to a pair of cluster ids, one for each half of the vector. If we choose k = 1000 to cluster both halves, we have k<sup>2</sup>= 1000 * 1000 = 1e6 possible pairs. In other words, by clustering each half separately and assigning each item a pair of cluster ids, we can get the same granularity of partitioning (1 million clusters total) with only 2 * 1000 distance computations with half the number of dimensions for a total computational savings of 1000x. Conversely, for the same computational cost, we gain a factor of k more partitions of the data space, providing a much finer-grained index.</span></p> <p><span style="font-weight:400;">This idea of splitting vectors into subvectors and clustering each split separately is called </span><a href="https://lear.inrialpes.fr/pubs/2011/JDS11/jegou_searching_with_quantization.pdf"><i><span style="font-weight:400;">product quantization</span></i></a><span style="font-weight:400;">. 
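</span></p> <p><span style="font-weight:400;">As a toy illustration of that two-split scheme (our real pipeline is different; the scikit-learn calls, parameter values, and names here are stand-ins), we can cluster each half of the vectors separately, key every indexed item by its pair of cluster ids, and pull candidates from the query’s cell:</span></p> <pre><code>import numpy as np
from collections import defaultdict
from sklearn.cluster import KMeans

def build_multi_index(vectors, k=100):
    """Cluster each half of the vectors separately and key items by (c1, c2)."""
    half = vectors.shape[1] // 2
    km1 = KMeans(n_clusters=k, n_init=10).fit(vectors[:, :half])
    km2 = KMeans(n_clusters=k, n_init=10).fit(vectors[:, half:])
    cells = defaultdict(list)
    for item_id, (c1, c2) in enumerate(zip(km1.labels_, km2.labels_)):
        cells[(c1, c2)].append(item_id)
    return km1, km2, cells

def candidates(query, km1, km2, cells):
    """Return the items that share the query's pair of cluster ids."""
    half = query.shape[0] // 2
    c1 = int(km1.predict(query[np.newaxis, :half])[0])
    c2 = int(km2.predict(query[np.newaxis, half:])[0])
    return cells.get((c1, c2), [])
</code></pre> <p><span style="font-weight:400;">In practice the candidate set would also be expanded with items from neighboring cells, as described above.</span></p> <p><span style="font-weight:400;">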
When we use this idea to index a dataset it is called the </span><a href="http://cache-ash04.cdn.yandex.net/download.yandex.ru/company/cvpr2012.pdf"><i><span style="font-weight:400;">inverted multi-index</span></i></a><span style="font-weight:400;">, and it forms the basis for fast candidate retrieval in our similarity index. Typically the distribution of points over the clusters in a multi-index will be unbalanced as compared to a standard k-means index, but this unbalance is a fair trade for the much higher resolution partitioning that it buys us. In fact, a multi-index will only be balanced across clusters if the two halves of the vectors are perfectly statistically independent. This is not the case in most real world data, but some heuristic preprocessing—like </span><a href="https://en.wikipedia.org/wiki/Principal_component_analysis"><span style="font-weight:400;">PCA-ing</span></a><span style="font-weight:400;"> and permuting the dimensions so that the cumulative per-dimension variance is approximately balanced between the halves—helps in many cases. And just like the simple k-means index, there is a fast algorithm for finding a ranked list of clusters for a query if we need to expand the candidate set.</span></p> <p><span style="font-weight:400;">After we have a set of candidates, we must rank them. We could store the full vector in the index and use it to compute the distance for each candidate item, but this would incur a large memory overhead (for example, 256-dimensional vectors of 4-byte floats would require 1TB for 1 billion photos) as well as a computational overhead. LOPQ solves these issues by performing another product quantization, this time on the </span><i><span style="font-weight:400;">residuals</span></i><span style="font-weight:400;"> of the data. The residual of a point is the difference vector between the point and its closest cluster centroid. Given a residual vector and the cluster indexes along with the corresponding centroids, we have enough information to reproduce the original vector exactly. Instead of storing the residuals, LOPQ product quantizes the residuals, usually with a higher number of splits, and stores only the cluster indexes in the index. For example, if we split the vector into 8 splits and each split is clustered with 256 centroids, we can store the compressed vector with only 8 bytes regardless of the number of dimensions to start (though certainly a higher number of dimensions will result in higher approximation error). With this </span><a href="https://en.wikipedia.org/wiki/Lossy_compression"><span style="font-weight:400;">lossy representation</span></a><span style="font-weight:400;"> we can produce a reconstruction of a vector from the 8-byte codes: we simply take each quantization code, look up the corresponding centroid, and concatenate these 8 centroids together to produce a reconstruction. Likewise, we can approximate the distance from the query to an index vector by computing the distance between the query and the reconstruction. We can do this computation quickly for many candidate points by computing the squared difference of each split of the query to all of the centroids for that split. After computing this table, we can compute the squared difference for an index point by looking up the precomputed squared difference for each of the 8 indexes and summing them together to get the total squared difference. 
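</span></p> <p><span style="font-weight:400;">A minimal NumPy sketch of that table trick (illustrative only; it assumes the query passed in is already the query’s residual within a coarse cell, with 8 splits and 256 centroids per split):</span></p> <pre><code>import numpy as np

def adc_rank(query_residual, codebooks, codes):
    """Rank candidates by approximate squared distance using a lookup table.

    query_residual: (d,) residual of the query within its coarse cell
    codebooks:      (8, 256, d // 8) per-split centroids for that cell
    codes:          (n, 8) uint8 codes stored in the index for n candidates
    """
    n_splits, n_centroids, sub_dim = codebooks.shape
    query_splits = query_residual.reshape(n_splits, sub_dim)

    # Precompute, for each split, the squared distance from the query's
    # sub-vector to all 256 centroids of that split: an (8, 256) table.
    table = ((codebooks - query_splits[:, np.newaxis, :]) ** 2).sum(axis=2)

    # Each candidate's approximate squared distance is the sum of 8 lookups.
    approx = table[np.arange(n_splits), codes].sum(axis=1)
    return np.argsort(approx)
</code></pre> <p><span style="font-weight:400;">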
This caching trick allows us to quickly rank many candidates without resorting to distance computations in the original vector space.</span></p> <p>LOPQ adds one final detail: for each cluster in the multi-index, LOPQ fits a local rotation to the residuals of the points that fall in that cluster. This rotation is simply a PCA that aligns the major directions of variation in the data to the axes, followed by a permutation to heuristically balance the variance across the splits of the product quantization. Note that this is the exact preprocessing step that is usually performed for the top-level multi-index. It tends to make the approximate distance computations more accurate by mitigating errors introduced by assuming that each split of the vector in the product quantization is statistically independent from other splits. Additionally, since a rotation is fit for each cluster, these rotations serve to fit the local data distribution better.</p> <p>Below is a diagram from the LOPQ paper that illustrates the core ideas of LOPQ. K-means (a) is very effective at allocating cluster centroids, illustrated as red points, that target the distribution of the data, but it has other drawbacks at scale as discussed earlier. In the 2d example shown, we can imagine product quantizing the space with 2 splits, each with 1 dimension. Product Quantization (b) clusters each dimension independently and cluster centroids are specified by pairs of cluster indexes, one for each split. This is effectively a grid over the space. Since the splits are treated as if they were statistically independent, we will, unfortunately, get many clusters that are “wasted” by not targeting the data distribution. We can improve on this situation by rotating the data such that the main dimensions of variation are axis-aligned. This version, called Optimized Product Quantization (c), does a better job of making sure each centroid is useful. LOPQ (d) extends this idea by first coarsely clustering the data and then doing a separate instance of OPQ for each cluster, allowing highly targeted centroids while still reaping the benefits of product quantization in terms of scalability.</p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3581" src="https://wp.flickr.net/wp-content/uploads/sites/3/2017/03/lopq.png?w=800" alt="" width="800" height="689" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2017/03/lopq.png 1022w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/lopq.png?resize=150,129 150w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/lopq.png?resize=800,689 800w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/lopq.png?resize=768,661 768w, https://code.flickr.net/wp-content/uploads/sites/3/2017/03/lopq.png?resize=348,300 348w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p><span style="font-weight:400;">LOPQ is state-of-the-art for quantization methods, and you can find more information about the algorithm, as well as benchmarks, </span><a href="http://image.ntua.gr/iva/research/lopq/"><span style="font-weight:400;">here</span></a><span style="font-weight:400;">. Additionally, we provide an </span><a href="https://github.com/yahoo/lopq"><span style="font-weight:400;">open-source implementation</span></a><span style="font-weight:400;"> in Python and Spark which you can apply to your own datasets. The algorithm produces a set of cluster indexes that can be queried efficiently in an inverted index, as described. 
We have also explored use cases that use these indexes as a hash for fast deduplication of images and large-scale clustering. These extended use cases are studied </span><a href="https://arxiv.org/abs/1604.06480"><span style="font-weight:400;">here</span></a><span style="font-weight:400;">.</span></p> <h2><span style="font-weight:400;">Conclusion</span></h2> <p><span style="font-weight:400;">We have described our system for large-scale visual similarity search at Flickr. Techniques for producing high-quality vector representations for images with deep learning are constantly improving, enabling new ways to search and explore large multimedia collections. These techniques are being applied in other domains as well to, for example, produce vector representations for </span><a href="https://en.wikipedia.org/wiki/Word2vec"><span style="font-weight:400;">text</span></a><span style="font-weight:400;">, </span><a href="https://arxiv.org/pdf/1502.04681.pdf"><span style="font-weight:400;">video</span></a><span style="font-weight:400;">, and even </span><a href="https://arxiv.org/pdf/1603.00856.pdf"><span style="font-weight:400;">molecules</span></a><span style="font-weight:400;">. Large-scale approximate nearest neighbor search has importance and potential application in these domains as well as many others. Though these techniques are in their infancy, we hope similarity search provides a useful new way to appreciate the amazing collection of images at Flickr and surface photos of interest that may have previously gone undiscovered. We are excited about the future of this technology at Flickr and beyond.</span></p> <p><b>Acknowledgements</b></p> <p><span style="font-weight:400;">Yannis Kalantidis, Huy Nguyen, Stacey Svetlichnaya, Arel Cordero. Special thanks to the rest of the Computer Vision and Machine Learning team and the Vespa search team who manages Yahoo’s internal search engine.</span></p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/uncategorized/" rel="category tag">Uncategorized</a> </span> <span class="sep"> | </span> <span class="tag-links"> <span class="entry-utility-prep entry-utility-prep-tag-links">Tagged</span> <a href="https://code.flickr.net/tag/machine-learning/" rel="tag">machine learning</a>, <a href="https://code.flickr.net/tag/machine-tags/" rel="tag">machine tags</a>, <a href="https://code.flickr.net/tag/neural-network/" rel="tag">neural network</a>, <a href="https://code.flickr.net/tag/similarity-search/" rel="tag">similarity search</a>, <a href="https://code.flickr.net/tag/visual-similarity/" rel="tag">visual similarity</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3571 --> <article id="post-3550" class="post-3550 post type-post status-publish format-standard hentry category-uncategorized"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2017/01/05/a-year-without-a-byte/" rel="bookmark">A Year Without a Byte</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2017/01/05/a-year-without-a-byte/" title="1:47 am" rel="bookmark"><time class="entry-date" datetime="2017-01-05T01:47:56-08:00">January 5, 2017</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/archieflickr/" title="View all posts by Archie Russell" 
rel="author">Archie Russell</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p>One of the largest cost drivers in running a service like Flickr is storage. We’ve described multiple techniques to get this cost down over the years: use of <a href="https://yahooeng.tumblr.com/post/116391291701/yahoo-cloud-object-store-object-storage-at">COS</a>, <a href="http://code.flickr.net/2015/06/25/real-time-resizing-of-flickr-images-using-gpus">creating sizes dynamically</a> on GPUs and <a href="http://code.flickr.net/2015/09/25/perceptual-image-compression-at-flickr/">perceptual compression</a>. These projects have been very successful, but our storage cost is still significant.<br /> At the beginning of 2016, we challenged ourselves to go further — to go a full year without needing new storage hardware. Using multiple techniques, we got there.</p> <h2>The Cost Story</h2> <p>A little back-of-the-envelope math shows storage costs are a real concern. On a very high-traffic day, Flickr users upload as many as twenty-five million photos. These photos require an average of 3.25 megabytes of storage each, totalling over 80 terabytes of data. Stored naively in a cloud service similar to S3, this day’s worth of data would cost over $30,000 per year, and continue to incur costs every year.</p> <p>And a very large service will have over two hundred million active users. At a thousand images each, storage in a service similar to S3 would cost over $250 million per year (or $1.25 / user-year) plus network and other expenses. This compounds as new users sign up and existing users continue to take photos at an accelerating rate. Thankfully, our costs, and <em>every</em> large service’s costs, are different than storing naively at S3, but remain significant.</p> <p class="figure"><img decoding="async" loading="lazy" src="https://c1.staticflickr.com/1/445/32109964665_f465976673_o.png" alt="" width="800" height="600" /><br /> <span class="caption"><br /> Cost per byte have decreased, but bytes per image from iPhone-type platforms have increased. Cost per image hasn’t changed significantly.<br /> </span></p> <p>Storage costs <em>do</em> drop over time. For example, S3 costs dropped from $0.15 per gigabyte month in 2009 to $0.03 per gigabyte-month in 2014, and cloud storage vendors have added low-cost options for data that is infrequently accessed. NAS vendors have also delivered large price reductions.</p> <p>Unfortunately, these lower costs <em>per byte</em> are counteracted by other forces. On iPhones, increasing camera resolution, burst mode and the addition of short animations (Live Photos) have increased bytes-per-image rapidly enough to keep storage cost <em>per image</em> roughly constant. And iPhone images are far from the largest.</p> <p>In response to these costs, photo storage services have pursued a variety of product options. To name a few: storing lower quality images or re-compressing, charging users for their data usage, incorporating advertising, selling associated products such as prints, and tying storage to purchases of handsets.</p> <p>There are also a number of engineering approaches to controlling storage costs. We sketched out a few and cover three that we implemented below: adjusting thresholds on our storage systems, rolling out existing savings approaches to more images, and deploying lossless JPG compression.</p> <h2>Adjusting Storage Thresholds</h2> <p>As we dug into the problem, we looked at our storage systems in detail. 
<h2>Adjusting Storage Thresholds</h2> <p>As we dug into the problem, we looked at our storage systems in detail. We discovered that our settings were based on assumptions about high write and delete loads that didn’t hold. Our storage is pretty static. Users only rarely delete or change images once uploaded. We also had two distinct areas of just-in-case space. 5% of our storage was reserved space for snapshots, useful for undoing accidental deletes or writes, and 8.5% was held free in reserve. This resulted in about 13% of our storage going unused. Trade lore states that disks should remain 10% free to avoid performance degradation, but we found 5% to be sufficient for our workload. So we combined our two just-in-case areas into one and reduced our free space threshold to that level. This was our simplest approach to the problem (by far), but it resulted in a large gain. With a couple of simple configuration changes, we freed up more than 8% of our storage.</p> <p class="figure"><img decoding="async" loading="lazy" src="https://c1.staticflickr.com/1/267/31961928142_a5f68d8501_o.png" alt="" width="800" height="600" /><br /> <span class="caption"><br /> Adjusting storage thresholds<br /> </span></p> <h2>Extending Existing Approaches</h2> <p>In our earlier posts, we have described dynamic generation of thumbnail sizes and perceptual compression. Combining the two approaches decreased thumbnail storage requirements by 65%, though we hadn’t applied these techniques to many of our images uploaded prior to 2014. One big reason for this: large-scale changes to older files are inherently risky, and require significant time and engineering work to do safely.</p> <p>Because we were concerned that further rollout of dynamic thumbnail generation would place a heavy load on our resizing infrastructure, we targeted only thumbnails from less-popular images for deletes. Using this approach, we were able to handle our complete resize load with just four GPUs. The process put a heavy load on our storage systems; to minimize the impact we randomized our operations across volumes. The entire process took about four months, resulting in even more significant gains than our storage threshold adjustments.</p> <p class="figure"><img decoding="async" loading="lazy" src="https://c1.staticflickr.com/1/315/31269155834_a74f75a611_o.png" alt="" width="800" height="600" /><br /> <span class="caption"><br /> Decreasing the number of thumbnail sizes<br /> </span></p> <h2>Lossless JPG Compression</h2> <p>Flickr has had a long-standing commitment to keeping uploaded images byte-for-byte intact. This has placed a floor on how much storage reduction we can do, but there are tools that can losslessly compress JPG images. Two well-known options are <a href="http://www.elektronik.htw-aalen.de/packjpg/">PackJPG</a> and <a href="https://blogs.dropbox.com/tech/2016/07/lepton-image-compression-saving-22-losslessly-from-images-at-15mbs">Lepton</a>, from Dropbox. These tools work by decoding the JPG, then very carefully compressing it using a more efficient approach. This typically shrinks a JPG by about 22%. At Flickr’s scale, this is significant. The downside is that these re-compressors use a lot of CPU. PackJPG compresses at about 2MB/s on a single core, or about fifteen core-years for a single petabyte worth of JPGs. Lepton uses multiple cores and, at 15MB/s, is much faster than PackJPG, but uses roughly the same amount of CPU time.</p> <p>This CPU requirement also complicated on-demand serving. If we recompressed all the images on Flickr, we would need potentially thousands of cores to handle our decompress load. 
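</p> <p>As a quick sanity check on that core-year figure (the throughput number is the one quoted above):</p> <pre><code># Sanity check on the recompression CPU estimate quoted above.
petabyte = 1e15                      # bytes
packjpg_rate = 2e6                   # ~2 MB/s on a single core
seconds_per_year = 3600 * 24 * 365

core_years = petabyte / packjpg_rate / seconds_per_year
print(round(core_years, 1), "core-years per petabyte")   # roughly fifteen
</code></pre> <p>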
We considered putting some restrictions on access to compressed images, such as requiring users to login to access original images, but ultimately found that if we targeted only rarely accessed private images, decompressions would occur only infrequently. Additionally, restricting the maximum size of images we compressed limited our CPU time per decompress. We rolled this out as a component of our existing serving stack without requiring any additional CPUs, and with only minor impact to user experience.</p> <p>Running our users’ original photos through lossless compression was probably our highest-risk approach. We can recreate thumbnails easily, but a corrupted source image cannot be recovered. Key to our approach was a re-compress-decompress-verify strategy: every recompressed image was decompressed and compared to its source before removing the uncompressed source image.</p> <p>This is still a work-in-progress. We have compressed many images but to do our entire corpus is a lengthy process, and we had reached our zero-new-storage-gear goal by mid-year.</p> <h2>On The Drawing Board</h2> <p>We have several other ideas which we’ve investigated but haven’t implemented yet.</p> <p>In our current storage model, we have originals and thumbnails available for every image, each stored in two datacenters. This model assumes that the images need to be viewable relatively quickly at any point in time. But private images belonging to accounts that have been inactive for more than a few months are unlikely to be accessed. We could “freeze” these images, dropping their thumbnails and recreate them when the dormant user returns. This “thaw” process would take under thirty seconds for a typical account. Additionally, for photos that are private (but not dormant), we could go to a single uncompressed copy of each thumbnail, storing a compressed copy in a second datacenter that would be decompressed as needed.</p> <p>We might not even need two copies of each dormant original image available on disk. We’ve pencilled out a model where we place one copy on a slower, but underutilized, tape-based system while leaving the other on disk. This would decrease availability during an outage, but as these images belong to dormant users, the effect would be minimal and users would still see their thumbnails. The delicate piece here is the placement of data, as seeks on tape systems are prohibitively slow. Depending on the details of what constitutes a “dormant” photo these techniques could comfortably reduce storage used by over 25%.</p> <p>We’ve also looked into de-duplication, but we found our duplicate rate is in the 3% range. Users do have many duplicates of their <em>own</em> images on their devices, but these are excluded by our upload tools. We’ve also looked into using alternate image formats for our thumbnail storage. <a href="https://developers.google.com/speed/webp/">WebP</a> can be much more compact than ordinary JPG but our use of perceptual compression gets us close to WebP byte size and permits much faster resize. The <a href="http://bellard.org/bpg/">BPG</a> project proposes a <em>dramatically</em> smaller, H.265 based encoding but has IP and other issues.</p> <p>There are several similar optimizations available for videos. 
Although Flickr is primarily image-focused, videos are typically much larger than images and consume considerably more storage.</p> <h2>Conclusion</h2> <p class="figure"><img decoding="async" loading="lazy" src="https://c1.staticflickr.com/1/751/32071867876_1cd6466b9a_o.png" alt="" width="800" height="600" /><br /> <span class="caption"><br /> Optimization over several releases<br /> </span></p> <p>Since 2013 we’ve optimized our usage of storage by nearly 50%. Our latest efforts helped us get through 2016 without purchasing any additional storage, and we still have a few more options available.</p> <p>Peter Norby, Teja Komma, Shijo Joy and Bei Wu formed the core team for our zero-storage-budget project. Many others assisted the effort.</p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/uncategorized/" rel="category tag">Uncategorized</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3550 --> <article id="post-3453" class="post-3453 post type-post status-publish format-standard hentry category-hadoop category-infrastructure tag-personalization"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2016/09/30/personalized-group-recommendations-on-flickr/" rel="bookmark">Personalized Group Recommendations on Flickr</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2016/09/30/personalized-group-recommendations-on-flickr/" title="4:04 am" rel="bookmark"><time class="entry-date" datetime="2016-09-30T04:04:10-07:00">September 30, 2016</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/mehulpatel001/" title="View all posts by Mehul Patel" rel="author">Mehul Patel</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p><span style="font-weight:400;">There are two primary paradigms for the discovery of digital content. First is the search paradigm, in which the user is actively looking for specific content using search terms and filters (e.g., Google </span><a href="https://www.google.com/?q=iceland"><span style="font-weight:400;">web search</span></a><span style="font-weight:400;">, Flickr </span><a href="https://www.flickr.com/search/?text=iceland&dimension_search_mode=min&height=1024&width=1024"><span style="font-weight:400;">image search</span></a><span style="font-weight:400;">, Yelp </span><a href="https://www.yelp.com/search?find_desc=irish+pub&find_loc=San+Francisco,+CA&start=0&attrs=RestaurantsPriceRange2.2&open_now=4277"><span style="font-weight:400;">restaurant search</span></a><span style="font-weight:400;">, etc.). Second is a passive approach, in which the user browses content presented to them (e.g., NYTimes </span><a href="http://www.nytimes.com/"><span style="font-weight:400;">news</span></a><span style="font-weight:400;">, Flickr </span><a href="https://www.flickr.com/explore"><span style="font-weight:400;">Explore</span></a><span style="font-weight:400;">, and Twitter </span><a href="https://twitter.com/trendingtopics/"><span style="font-weight:400;">trending topics</span></a><span style="font-weight:400;">). 
Personalization benefits both approaches by providing relevant content that is tailored to users’ tastes (e.g., Google </span><a href="https://news.google.com/"><span style="font-weight:400;">News</span></a><span style="font-weight:400;">, Netflix </span><a href="https://www.netflix.com/browse"><span style="font-weight:400;">homepage</span></a><span style="font-weight:400;">, LinkedIn </span><a href="https://www.linkedin.com"><span style="font-weight:400;">job search</span></a><span style="font-weight:400;">, etc.). We believe personalization can improve the user experience at Flickr by guiding both new as well as more experienced members as they explore photography. Today, we’re excited to bring you personalized group recommendations.</span></p> <p><span style="font-weight:400;">Flickr Groups are great for bringing people together around a common theme, be it a style of photography, camera, place, event, topic, or just some fun. Community members join for several reasons—to consume photos, to get feedback, to play games, to get more views, or to start a discussion about photos, cameras, life or the universe. We see value in connecting people with appropriate groups based on their interests. Hence, we decided to start the personalization journey by providing contextually relevant and personalized content that is tuned to each person’s unique taste. </span></p> <p><span style="font-weight:400;">Of course, in order to respect users’ privacy, group recommendations only consider public photos and public groups. Additionally, recommendations are private to the user. In other words, nobody else sees what is recommended to an individual. </span></p> <p><span style="font-weight:400;">In this post we describe how we are improving Flickr’s group recommendations. In particular, we describe how we are replacing a curated, non-personalized, static list of groups with a dynamic group recommendation engine that automatically generates new results based on user interactions to provide personalized recommendations unique to each person. 
The algorithms and backend systems we are building are broad and applicable to other scenarios, such as photo recommendations, contact recommendations, content discovery, etc.</span></p> <p><img decoding="async" loading="lazy" class=" size-full wp-image-3463 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png" alt="Group_recommendations2.png" width="2188" height="1924" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png 2188w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=150,132 150w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=800,703 800w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=768,675 768w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=1024,900 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=1536,1351 1536w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=2048,1801 2048w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/group_recommendations2.png?resize=341,300 341w" sizes="(max-width: 2188px) 100vw, 2188px" /></p> <p style="text-align:center;"><b>Figure</b><span style="font-weight:400;">: Personalized group recommendations</span></p> <h1>Challenges</h1> <p><span style="font-weight:400;">One challenge of recommendations is determining a user’s interests. These interests could be user-specified, explicit preferences or could be inferred implicitly from their actions, supported by user feedback. For example: </span></p> <ul> <li style="font-weight:400;"><span style="font-weight:400;">Explicit:</span> <ul> <li style="font-weight:400;"><span style="font-weight:400;">Ask users what topics interest them</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Ask users why they joined a particular group</span></li> </ul> </li> <li style="font-weight:400;"><span style="font-weight:400;">Implicit:</span> <ul> <li style="font-weight:400;"><span style="font-weight:400;">Infer user tastes from groups they join, photos they like, and users they follow</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Infer why users joined a particular group based on their activity, interactions, and dwell time</span></li> </ul> </li> <li style="font-weight:400;"><span style="font-weight:400;">Feedback:</span> <ul> <li style="font-weight:400;"><span style="font-weight:400;">Get feedback on recommended items when users perform actions such as “Join” or “Follow” or click “Not interested”</span></li> </ul> </li> </ul> <p><span style="font-weight:400;">Another challenge of recommendations is figuring out group characteristics. I.e.: what type of group is it? What interests does it serve? What brings Flickr members to this group? We can infer this by analyzing group members, photos posted to the group, discussions and amount of activity in the group.</span></p> <p><span style="font-weight:400;">Once we have figured out user preferences and group characteristics, recommendations essentially becomes a matchmaking process. 
At a high-level, we want to support 3 use cases:</span></p> <ul> <li style="font-weight:400;"><b>Use Case # 1</b><span style="font-weight:400;">: Given a group, return all groups that are “similar”</span></li> <li style="font-weight:400;"><b>Use Case # 2</b><span style="font-weight:400;">: Given a user, return a list of recommended groups</span></li> <li style="font-weight:400;"><b>Use Case # 3</b><span style="font-weight:400;">: Given a photo, return a list of groups that the photo could belong to</span></li> </ul> <h1>Collaborative Filtering</h1> <p><span style="font-weight:400;">One approach to recommender systems is presenting similar content in the current context of actions. For example, Amazon’s “Customers who bought this item also bought” or LinkedIn’s “People also viewed.” Item-based collaborative filtering can be used for computing similar items.</span></p> <p><img decoding="async" loading="lazy" class="alignnone wp-image-3457 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/collaborative_filtering.gif" alt="collaborative_filtering" width="463" height="447" /></p> <p style="text-align:center;"><b>Figure</b><span style="font-weight:400;">: Collaborative filtering in action</span></p> <p style="text-align:center;"><span style="font-weight:400;">By Moshanin (Own work) [</span><a href="http://creativecommons.org/licenses/by-sa/3.0"><span style="font-weight:400;">CC BY-SA 3.0</span></a><span style="font-weight:400;">] from </span><a href="https://upload.wikimedia.org/wikipedia/commons/5/52/Collaborative_filtering.gif"><span style="font-weight:400;">Wikipedia</span></a></p> <p><span style="font-weight:400;">Intuitively, two groups are similar if they have the same content or same set of users. We observed that users often post the same photo to multiple groups. So, to begin, we compute group similarity based on a photo’s presence in multiple groups. 
</span></p> <p style="text-align:left;"><span style="font-weight:400;">Consider the following sample matrix </span><span style="font-weight:400;">M</span><span style="font-weight:400;">(</span><span style="font-weight:400;">G</span><span style="font-weight:400;">i</span><span style="font-weight:400;"> -> </span><span style="font-weight:400;">P</span><span style="font-weight:400;">j</span><span style="font-weight:400;">) constructed from group photo pools, where 1 means a corresponding group (</span><span style="font-weight:400;">G</span><span style="font-weight:400;">i</span><span style="font-weight:400;">) contains an image, and empty (0) means a group does not contain the image.</span></p> <p style="text-align:left;"><img decoding="async" loading="lazy" class=" size-full wp-image-3472 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/matrix1.png" alt="matrix1" width="569" height="237" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/09/matrix1.png 569w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/matrix1.png?resize=150,62 150w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/matrix1.png?resize=500,208 500w" sizes="(max-width: 569px) 100vw, 569px" /></p> <p style="text-align:left;"><span style="font-weight:400;">From this, we can compute </span><span style="font-weight:400;">M.</span><span style="font-weight:400;">M’</span> <span style="font-weight:400;">(</span><span style="font-weight:400;">M</span><span style="font-weight:400;">’s </span><a href="https://en.wikipedia.org/wiki/Transpose"><span style="font-weight:400;">transpose</span></a><span style="font-weight:400;">), which gives us the number of common photos between every pair of groups (G</span><span style="font-weight:400;">i</span><span style="font-weight:400;">, G</span><span style="font-weight:400;">j</span><span style="font-weight:400;">):</span></p> <p style="text-align:left;"><img decoding="async" loading="lazy" class="size-full wp-image-3535 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/matrix21.png" alt="matrix2" width="512" height="182" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/09/matrix21.png 512w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/matrix21.png?resize=150,53 150w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/matrix21.png?resize=500,178 500w" sizes="(max-width: 512px) 100vw, 512px" /></p> <p><span style="font-weight:400;">We use modified </span><a href="https://en.wikipedia.org/wiki/Cosine_similarity"><span style="font-weight:400;">cosine similarity</span></a><span style="font-weight:400;"> to compute a similarity score between every pair of groups: </span></p> <p style="text-align:center;"><img decoding="async" loading="lazy" class="alignnone size-full wp-image-3536" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/cosinesimilarity1.png" alt="cosinesimilarity" width="337" height="59" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/09/cosinesimilarity1.png 337w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/cosinesimilarity1.png?resize=150,26 150w" sizes="(max-width: 337px) 100vw, 337px" /></p> <p><span style="font-weight:400;">To make this calculation robust, we only consider groups that have a minimum of X photos and keep only strong relationships (i.e., groups that have at least Y common photos). Finally, we use the similarity scores to come up with the top k-nearest neighbors for each group. 
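</span></p> <p><span style="font-weight:400;">Here is a compact NumPy sketch of that computation (the production pipeline described below runs on Hadoop at a very different scale; the function and threshold names are illustrative stand-ins for X and Y):</span></p> <pre><code>import numpy as np

def group_neighbors(m, min_photos=2, min_common=1, k=3):
    """Top-k similar groups from a binary group-by-photo matrix.

    m[i, j] is 1 when group i contains photo j. min_photos and min_common
    stand in for the X and Y thresholds mentioned above.
    """
    m = np.asarray(m, dtype=float)
    sizes = m.sum(axis=1)                    # photos per group
    common = m @ m.T                         # M.M': common photos per group pair
    denom = np.sqrt(np.outer(sizes, sizes))
    sim = np.divide(common, denom, out=np.zeros_like(common), where=denom > 0)

    # Keep only sufficiently large groups and sufficiently strong relationships.
    valid = sizes >= min_photos
    keep = valid[:, None] & valid[None, :] & (common >= min_common)
    sim = np.where(keep, sim, 0.0)
    np.fill_diagonal(sim, 0.0)               # a group is not its own neighbor

    # Indices of the top-k most similar groups for each group, best first.
    return np.argsort(-sim, axis=1)[:, :k]

# Tiny illustration: 4 hypothetical groups and 6 photos.
# (In practice you would also drop zero-score entries from the result.)
m = np.array([[1, 1, 0, 0, 1, 0],
              [1, 1, 1, 0, 0, 0],
              [0, 0, 1, 1, 0, 1],
              [0, 0, 0, 1, 0, 1]])
print(group_neighbors(m, k=2))
</code></pre> <p><span style="font-weight:400;">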
<p><span style="font-weight:400;">We also compute group similarity based on group membership, i.e., by defining a group-user relationship (Gi -> Uj) matrix. It is interesting to note that the results obtained from this relationship are very different from those of the (Gi, Pj) matrix. The group-photo relationship tends to capture groups that are similar by content (e.g., “macro photography”). On the other hand, the group-user relationship gives us groups that the same users have joined but that are possibly about very different topics, thus providing us with a diversity of results. We can extend this approach by computing group similarity using other features and relationships (e.g., autotags of photos to cluster groups by themes, geotags of photos to cluster groups by place, frequency of discussion to cluster groups by interaction model, etc.).</span></p> <p><span style="font-weight:400;">Using this, we can easily come up with a list of similar groups (Use Case # 1). We can either merge the results obtained by different similarity relationships into a single result set, or keep them separate to power features like “Other groups similar to this group” and “People who joined this group also joined.”</span></p> <p><span style="font-weight:400;">We can also use the same data for recommending groups to users (Use Case # 2). We can look at all the groups that the user has already joined and recommend groups similar to those. </span></p> <p><span style="font-weight:400;">To come up with a list of relevant groups for a photo (Use Case # 3), we can compute photo similarity either by using an approach similar to the one above or by using Flickr computer vision models to find photos similar to the query photo. A simple approach would then be to recommend groups that these similar photos belong to.</span></p> <h1>Implementation</h1> <p><span style="font-weight:400;">Due to the massive scale of the data (millions of users x 100k groups), we used </span><a href="http://yahoohadoop.tumblr.com/"><span style="font-weight:400;">Yahoo’s Hadoop Stack</span></a><span style="font-weight:400;"> to implement the collaborative filtering algorithm. We exploited the sparsity of the entity-item relationship matrices to come up with a more efficient model of computation and applied several optimizations for computational efficiency. We only need to compute the similarity model once every 7 days, since the signals change slowly. </span></p> <p><img decoding="async" loading="lazy" class=" wp-image-3537 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/architecture_diagram1.jpg" alt="architecture_diagram" width="507" height="383" /></p> <p style="text-align:center;"><b>Figure</b><span style="font-weight:400;">: Computational architecture</span></p> <p style="text-align:center;"><span style="font-weight:400;">(All logos and icons are trademarks of their respective entities)</span></p> <p><span style="font-weight:400;">Similarity scores and top k-nearest neighbors for each group are published to </span><a href="http://redis.io/"><span style="font-weight:400;">Redis</span></a><span style="font-weight:400;"> for quick lookups needed by the serving layer.</span></p>
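<p><span style="font-weight:400;">As a rough sketch of the serving-side logic for Use Case # 2 (the names are illustrative, and loadNeighbors is a hypothetical helper that reads a group’s precomputed top-k neighbor list out of Redis), recommending groups to a user can be as simple as merging the neighbor lists of the groups they have already joined:</span></p>
<pre>
// Illustrative sketch of Use Case # 2 at serving time. loadNeighbors(groupId) is a
// hypothetical async helper that returns the precomputed list [{ group, score }, ...]
// for a group from Redis.
async function recommendGroupsForUser(userGroupIds, loadNeighbors, limit) {
  const joined = new Set(userGroupIds);
  const scores = new Map(); // candidate groupId -> aggregated score

  for (const groupId of userGroupIds) {
    const neighbors = await loadNeighbors(groupId);
    for (const n of neighbors) {
      if (joined.has(n.group)) continue; // skip groups the user already joined
      scores.set(n.group, (scores.get(n.group) || 0) + n.score);
    }
  }

  return Array.from(scores.entries())
    .sort(function (a, b) { return b[1] - a[1]; }) // highest aggregate score first
    .slice(0, limit)
    .map(function (entry) { return { group: entry[0], score: entry[1] }; });
}
</pre>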
<p><span style="font-weight:400;">Recommendations for each user are computed in real time when the user visits the </span><a href="https://www.flickr.com/groups"><span style="font-weight:400;">groups</span></a><span style="font-weight:400;"> page. The implementation of the serving layer takes care of a few aspects that are important from a usability and performance point of view:</span></p> <ul> <li style="font-weight:400;"><b>Freshness of results</b><span style="font-weight:400;">: Users hate seeing the same results offered again and again, even when those results are relevant. We have implemented a randomization scheme that returns fresh results every X hours, while making sure that results stay static over a user’s single session.</span></li> <li style="font-weight:400;"><b>Diversity of results</b><span style="font-weight:400;">: Diversity of results in recommendations is very important, since a user might not want to join a group that is very similar to one they are already involved in. We require a good threshold that balances similarity and diversity. To improve diversity further, we combine recommendations from different algorithms. We also cluster the user’s groups into diverse sets before computing recommendations.</span></li> <li style="font-weight:400;"><b>Dynamic results</b><span style="font-weight:400;">: Users expect their interactions to have a quick effect on recommendations. We thus incorporate user interactions when making subsequent recommendations so that the system feels dynamic.</span></li> <li style="font-weight:400;"><b>Performance</b><span style="font-weight:400;">: Recommendation results are cached so that the API response is quick on subsequent visits. </span></li> </ul> <h1>Cold Start</h1> <p><span style="font-weight:400;">The drawback of collaborative filtering is that it cannot offer recommendations to new users who do not have any associations yet. For these users, we plan to recommend groups from an algorithmically computed list of top/trending groups alongside manual curation. As users interact with the system by joining groups, the recommendations become more personalized.</span></p> <h1>Measuring Effectiveness</h1> <p><span style="font-weight:400;">We use qualitative feedback from user studies and alpha group testing to understand user expectations and to guide initial feature design. However, for continued algorithmic improvements, we need an objective quantitative metric. Recommendation results are by their very nature subjective, so measuring effectiveness is tricky. The usual approach is to roll out to a random population of users and measure the outcome of interest for the test group as compared to the control group (ref: </span><a href="https://en.wikipedia.org/wiki/A/B_testing"><span style="font-weight:400;">A/B testing</span></a><span style="font-weight:400;">). </span></p> <p><span style="font-weight:400;">We plan to employ this technique and measure user interaction and engagement to keep improving the recommendation algorithms.</span></p>
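<p><span style="font-weight:400;">To illustrate the rollout mechanics only (a generic sketch, not our actual experimentation framework), users can be assigned deterministically to test and control buckets by hashing their id, so the split is random across the population but stable for any given user:</span></p>
<pre>
// Generic sketch of deterministic A/B bucketing.
const crypto = require('crypto');

// Returns true if the user falls into the test group for this experiment.
// experiment: a stable experiment name; percentage: size of the test group (0-100).
function inTestGroup(userId, experiment, percentage) {
  const digest = crypto.createHash('md5')
    .update(experiment + ':' + userId)
    .digest();
  const bucket = digest.readUInt32BE(0) % 100; // stable bucket in [0, 100)
  return percentage > bucket;                  // e.g. percentage = 5 puts ~5% of users in test
}
</pre>
<p><span style="font-weight:400;">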
Additionally, we plan to measure explicit signals such as when users click “Not interested.” This feedback will also be used to fine-tune future recommendations for users.</span></p> <p><img decoding="async" loading="lazy" class="size-medium wp-image-3538 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?w=800" alt="measuringeffectiveness" width="800" height="349" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png 2110w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=150,65 150w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=800,349 800w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=768,335 768w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=1024,446 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=1536,670 1536w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=2048,893 2048w, https://code.flickr.net/wp-content/uploads/sites/3/2016/09/measuringeffectiveness1.png?resize=500,218 500w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p style="text-align:center;"><b>Figure</b><span style="font-weight:400;">: Measuring user engagement</span></p> <h1>Future Directions</h1> <p><span style="font-weight:400;">While we’re seeing good initial results, we’d like to continue improving the algorithms to provide better results to the Flickr community. Potential future directions can be classified broadly into 3 buckets: algorithmic improvements, new product use cases, and new recommendation applications.</span></p> <p><i><span style="font-weight:400;">If you’d like to help, we’re hiring. 
Check out our </span></i><a href="https://www.flickr.com/jobs"><i><span style="font-weight:400;">jobs page</span></i></a><i><span style="font-weight:400;"> and get in touch.</span></i></p> <p><b><i>Product Engineering</i></b><i><span style="font-weight:400;">: Mehul Patel, Chenfan (Frank) Sun, Chinmay Kini</span></i></p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/hadoop/" rel="category tag">hadoop</a>, <a href="https://code.flickr.net/category/infrastructure/" rel="category tag">infrastructure</a> </span> <span class="sep"> | </span> <span class="tag-links"> <span class="entry-utility-prep entry-utility-prep-tag-links">Tagged</span> <a href="https://code.flickr.net/tag/personalization/" rel="tag">personalization</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3453 --> <article id="post-3432" class="post-3432 post type-post status-publish format-standard hentry category-uncategorized tag-careers tag-jobs"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2016/05/11/we-want-you-and-your-teammates/" rel="bookmark">We Want You… and Your Teammates</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2016/05/11/we-want-you-and-your-teammates/" title="8:19 pm" rel="bookmark"><time class="entry-date" datetime="2016-05-11T20:19:40-07:00">May 11, 2016</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/xanthet/" title="View all posts by Xanthe Travlos" rel="author">Xanthe Travlos</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p><a href="https://www.flickr.com/jobs"><span style="font-weight:400;"><img decoding="async" loading="lazy" class="size-medium wp-image-3436 aligncenter" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?w=800" alt="14493569810_7ac064e3c4_o" width="800" height="210" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg 9208w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=150,39 150w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=800,210 800w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=768,202 768w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=1024,269 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=1536,404 1536w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=2048,538 2048w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/14493569810_7ac064e3c4_o.jpg?resize=500,131 500w" sizes="(max-width: 800px) 100vw, 800px" /></span></a><a href="https://www.flickr.com/jobs"><span style="font-weight:400;">We’re hiring here at Flickr</span></a><span style="font-weight:400;"> and we got pretty excited the other week when we saw Stripe’s post: </span><a href="https://stripe.com/blog/bring-your-own-team"><span style="font-weight:400;">BYOT (Bring Your Own Team)</span></a><span style="font-weight:400;">. The sum of the parts is greater than the whole and all that. 
Genius <big hat tip to them>.</span></p> <p><span style="font-weight:400;">In case you didn’t read Stripe’s post, here’s the gist: you’re a team player, you like to make an impact, focus on a tough problem, set a challenging goal, and see the fruits of your labor after blood, sweat, and tears (or, maybe just brainpower). But you’ve got the itch to collaborate, to talk an idea through, break it down, and parallelize tasks or simply to be around your mates through work and play. Turns out you already have your go-to group of colleagues, roommates, siblings, or buddies that push, inspire, and get the best out of you. Well, in that case we may want to hire all of you! </span></p> <p><span style="font-weight:400;">Like Stripe, we understand the importance of team dynamics. So if you’ve already got something good going on, we want in on it too. We love Stripe and are stoked for this initiative of theirs, but if Flickr tickles your fancy (and it does ours :) consider bringing that team of yours this way too, especially if you’ve got a penchant for mobile development. We’d love to chat! </span></p> <p><span style="font-weight:400;">Email us: jobs at flickr.com</span></p> <p><img decoding="async" loading="lazy" class="alignnone size-medium wp-image-3445" src="https://wp.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg?w=800" alt="Team crop" width="800" height="497" srcset="https://code.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg 1200w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg?resize=150,93 150w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg?resize=800,497 800w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg?resize=768,477 768w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg?resize=1024,637 1024w, https://code.flickr.net/wp-content/uploads/sites/3/2016/05/team-crop.jpg?resize=483,300 483w" sizes="(max-width: 800px) 100vw, 800px" /></p> <p>Photos by: <a href="https://www.flickr.com/photos/cjmartin/14493569810/in/photolist-o5KkmE-s1Ab2X-oB9Jcz-r6894C-8UE9qs-nUiqdZ-nUD1T8-nSeAva-pu3e82-pEYdJy-rtfXVi-pAY9az-6qsxHF-nTFAd6-p3U1U1-qqhDn3-qY2QhS-or84zJ-osKqkn-qhnbAv-r5mmHv-ejKoBB-rASeqq-ejKp2H-qJV7kr-en2Lwp-oqgHNq-qsiNCQ-gKdheW-qDW829-o3RLfK-rw8To2-qbYAJC-nMYHBp-q4sajA-s3j5Ec-rj62WB-rxW8je-r31xGH-rqTqsT-occV8T-pRUg6U-en2L4a-p4R7rj-6cMnzR-oF16rb-7QBcD1-5XH2Yb-5XdwzJ-qjBx4n'">@Chris Martin</a> and <a href="https://www.flickr.com/photos/superic/17255895320/in/photolist-shQYuy-4y4vxZ-fqd5ou-dLQWLM">@Captain Eric Willis</a></p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/uncategorized/" rel="category tag">Uncategorized</a> </span> <span class="sep"> | </span> <span class="tag-links"> <span class="entry-utility-prep entry-utility-prep-tag-links">Tagged</span> <a href="https://code.flickr.net/tag/careers/" rel="tag">careers</a>, <a href="https://code.flickr.net/tag/jobs/" rel="tag">jobs</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3432 --> <article id="post-3394" class="post-3394 post type-post status-publish format-standard hentry category-api-2 category-open-source tag-api tag-javascript tag-node-js"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2016/04/25/introducing-yakbak-record-and-playback-http-interactions-in-nodejs/" rel="bookmark">Introducing yakbak: 
Record and playback HTTP interactions in NodeJS</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2016/04/25/introducing-yakbak-record-and-playback-http-interactions-in-nodejs/" title="7:22 pm" rel="bookmark"><time class="entry-date" datetime="2016-04-25T19:22:18-07:00">April 25, 2016</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/jeremyruppel/" title="View all posts by jeremyruppel" rel="author">jeremyruppel</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p><b></b><span style="font-weight:400;">Did you know that the new Front End of <a href="https://www.flickr.com" target="_blank">www.flickr.com</a> is one big Flickr API client? Writing a client for an existing API or service can be a lot of fun, but decoupling and testing that client can be quite tricky. There are many different approaches to taking the backing service out of the equation when it comes to writing tests for client code. Today we’ll discuss the pros and cons of some of these approaches, describe how the Flickr Front End team tests service-dependent libraries, and introduce you to our new NodeJS HTTP playback module: </span><a href="https://github.com/flickr/yakbak"><span style="font-weight:400;">yakbak</span></a><span style="font-weight:400;">!</span></p> <p><b>Scenario: Testing a Flickr API Client</b></p> <p><span style="font-weight:400;">Let’s jump into some code, shall we? Suppose we’re testing a (very, very simple) photo search API client:</span></p> <p><a href="https://gist.github.com/jeremyruppel/fd25c723a5962a49936f174d765aa11a" rel="nofollow">https://gist.github.com/jeremyruppel/fd25c723a5962a49936f174d765aa11a</a></p> <p><span style="font-weight:400;">Currently, this code will make an HTTP request to the Flickr API on every test run. This is less than desirable for several reasons:</span></p> <ul> <li style="font-weight:400;"><a href="https://en.wikipedia.org/wiki/User-generated_content"><i><span style="font-weight:400;">UGC</span></i></a><i><span style="font-weight:400;"> is unpredictable</span></i><span style="font-weight:400;">. In this test, we’re asserting that the response code is an HTTP 200, but obviously our client code needs to provide the response data to be useful. It’s impossible to write a meaningful and predictable test against live content.</span></li> <li style="font-weight:400;"><i><span style="font-weight:400;">Traffic is unpredictable</span></i><span style="font-weight:400;">. This photos search API call usually takes ~150ms for simple queries, but a more complex query or a call during peak traffic may take longer.</span></li> <li style="font-weight:400;"><i><span style="font-weight:400;">Downtime is unpredictable</span></i><span style="font-weight:400;">. Every service has downtime (the term is </span><a href="https://en.wikipedia.org/wiki/High_availability"><span style="font-weight:400;">“four nines,”</span></a><span style="font-weight:400;"> not “one hundred percent” for a reason), and if your service is down, your client tests will fail.</span></li> <li style="font-weight:400;"><i><span style="font-weight:400;">Networks are unpredictable</span></i><span style="font-weight:400;">. Have you ever tried coding on a plane? Enough said.</span></li> </ul> <p><span style="font-weight:400;">We want our test suite to be consistent, predictable, and fast. 
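</span></p> <p><span style="font-weight:400;">To make the scenario concrete, here is a rough sketch of such a client and test (illustrative only, not the gist above; the endpoint parameters and API key are placeholders, and the test runner is assumed to be mocha):</span></p>
<pre>
// Rough sketch of the kind of client under test; placeholder values throughout.
const request = require('superagent');
const assert = require('assert');

// A (very, very simple) photo search API client.
function searchPhotos(text, callback) {
  request
    .get('https://api.flickr.com/services/rest')
    .query({ method: 'flickr.photos.search', api_key: 'YOUR_API_KEY', text: text, format: 'json' })
    .end(callback);
}

describe('photo search client', function () {
  it('searches for photos', function (done) {
    searchPhotos('pugs', function (err, res) {
      assert.ifError(err);
      assert.equal(res.status, 200); // every test run hits the real API!
      done();
    });
  });
});
</pre>
<p><span style="font-weight:400;">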
We’re also only trying to test our client code, not the API. Let’s take a look at some ways to replace the API with a control, allowing us to predictably test the client code.</span></p> <p><b>Approach 1: Stub the HTTP client methods</b></p> <p><span style="font-weight:400;">We’re using </span><a href="https://github.com/visionmedia/superagent"><span style="font-weight:400;">superagent</span></a><span style="font-weight:400;"> as our HTTP client, so we could use a mocking library like </span><a href="http://sinonjs.org/"><span style="font-weight:400;">sinon</span></a><span style="font-weight:400;"> to stub out superagent’s Request methods:</span></p> <p><a href="https://gist.github.com/jeremyruppel/8b837f439663db325aaa2437a2259934" rel="nofollow">https://gist.github.com/jeremyruppel/8b837f439663db325aaa2437a2259934</a></p> <p><span style="font-weight:400;">With these changes, we never actually make an HTTP request to the API during a test run. Now our test is predictable, controlled, and it runs </span><b>crazy fast</b><span style="font-weight:400;">. However, this approach has some major drawbacks:</span></p> <ul> <li style="font-weight:400;"><i><span style="font-weight:400;">Tightly coupled with superagent.</span></i><span style="font-weight:400;"> We’re all up in the client’s implementation details here, so if superagent ever changes their API, we’ll need to correct our tests to match. Likewise, if we ever want to use a different HTTP client, we’ll need to correct our tests as well.</span></li> <li style="font-weight:400;"><i><span style="font-weight:400;">Difficult to specify the full HTTP response</span></i><span style="font-weight:400;">. Here we’re only specifying the </span><span style="font-weight:400;">statusCode</span><span style="font-weight:400;">; what about when we need to specify the body or the headers? Talk about verbose.</span></li> <li style="font-weight:400;"><i><span style="font-weight:400;">Not necessarily accurate</span></i><span style="font-weight:400;">. We’re trusting the test author to provide a fake response that matches what the actual server would send back. What happens if the API changes the response schema? Some unhappy developer will have to manually update the tests to match reality (probably an intern, let’s be honest).</span></li> </ul> <p><span style="font-weight:400;">We’ve at least managed to replace the service with a control in our tests, but we can do (slightly) better.</span></p> <p><b>Approach 2: Mock the NodeJS HTTP module</b></p> <p><span style="font-weight:400;">Every NodeJS HTTP client will eventually delegate to the standard NodeJS http module to perform the network request. This means we can intercept the request at a low level by using a tool like </span><a href="https://www.npmjs.com/package/nock"><span style="font-weight:400;">nock</span></a><span style="font-weight:400;">:</span></p> <p><a href="https://gist.github.com/jeremyruppel/d92a62400f635b42249adc041cdecc96" rel="nofollow">https://gist.github.com/jeremyruppel/d92a62400f635b42249adc041cdecc96</a></p> <p><span style="font-weight:400;">Great! We’re no longer stubbing out superagent and we can still control the HTTP response. This avoids the HTTP client coupling from the previous step, but still has many similar drawbacks:</span></p> <ul> <li style="font-weight:400;"><span style="font-weight:400;">We’re still completely implementation-dependent. 
If we want to pass a new query string parameter to our service, for example, we’ll also need to add it to the test so that nock will match the request.</span></li> <li style="font-weight:400;"><span style="font-weight:400;">It’s still laborious to specify the response headers, body, etc.</span></li> <li style="font-weight:400;"><span style="font-weight:400;">It’s still difficult to make sure the response body always matches reality.</span></li> </ul> <p><span style="font-weight:400;">At this point, it’s worth noting that none of these bullet points were an issue back when we were actually making the HTTP request. So, let’s do exactly that (once!).</span></p> <p><b>Approach 3: Record and playback the HTTP interaction</b></p> <p><span style="font-weight:400;">The Ruby community created the excellent </span><a href="https://github.com/vcr/vcr"><span style="font-weight:400;">VCR</span></a><span style="font-weight:400;"> gem for recording and replaying HTTP interactions during tests. Recorded HTTP requests exist as “tapes”, which are just files with some sort of format describing the interaction. The basic workflow goes like this:</span></p> <ol> <li style="font-weight:400;"><span style="font-weight:400;">The client makes an actual HTTP request.</span></li> <li style="font-weight:400;"><span style="font-weight:400;">VCR sits in front of the system’s HTTP library and intercepts the request.</span></li> <li style="font-weight:400;"><span style="font-weight:400;">If VCR has a tape matching the request, it simply replays the response to the client.</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Otherwise, VCR lets the HTTP request through to the service, records the interaction to a new tape on disk and plays it back to the client.</span></li> </ol> <p><b>Introducing yakbak</b></p> <p><span style="font-weight:400;">Today we’re open-sourcing </span><a href="https://github.com/flickr/yakbak"><span style="font-weight:400;">yakbak</span></a><span style="font-weight:400;">, our take on recording and playing back HTTP interactions in NodeJS. Here’s what our tests look like with a yakbak proxy:</span></p> <p><a href="https://gist.github.com/jeremyruppel/7050b34342a10d8e3dd8bc2dba0d50c0" rel="nofollow">https://gist.github.com/jeremyruppel/7050b34342a10d8e3dd8bc2dba0d50c0</a></p> <p>Here we’ve created a standard NodeJS http.Server with our proxy middleware. We’ve also configured our client to point to the proxy server instead of the origin service. Look, no implementation details!</p> <p>yakbak tries to do things The Node Way™ wherever possible. For example, each yakbak “tape” is actually its own module that simply exports an http.Server handler, which <span style="font-weight:400;">allows us to do some really cool things. For example, it’s trivial to create a server that always responds a certain way. Since the tape’s hash is based solely on the incoming request, we can easily edit the response however we like. We’re also kicking around a </span><a href="https://github.com/flickr/yakbak/issues?q=is%3Aopen+is%3Aissue+label%3Aenhancement"><span style="font-weight:400;">handful of enhancements</span></a><span style="font-weight:400;"> that should make yakbak an even more powerful development tool. </span></p> <p><span style="font-weight:400;">Thanks to yakbak, we’ve been writing fast, consistent, and reliable tests for our HTTP clients and applications. Want to give it a spin? 
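</span></p> <p><span style="font-weight:400;">To give a flavor of the setup (a minimal sketch along the lines of the project README; the port and tape directory are arbitrary, and mocha-style before/after hooks are assumed), the proxy is just an http.Server wrapping the yakbak handler, and the client under test is pointed at it instead of the origin:</span></p>
<pre>
// Minimal sketch: a record/replay proxy in front of the Flickr API.
// Tapes are written to ./tapes on the first (real) request and replayed after that.
const http = require('http');
const yakbak = require('yakbak');

const proxy = http.createServer(yakbak('https://api.flickr.com', {
  dirname: __dirname + '/tapes'
}));

before(function (done) {
  proxy.listen(4567, done); // arbitrary local port
});

after(function (done) {
  proxy.close(done);
});

// The client under test is then configured to talk to http://localhost:4567
// instead of https://api.flickr.com.
</pre>
<p><span style="font-weight:400;">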
Check it out today: </span><a href="https://github.com/flickr/yakbak"><span style="font-weight:400;">https://github.com/flickr/yakbak</span></a></p> <p><b>P.S. We’re hiring!</b></p> <p><span style="font-weight:400;">Do you love development tooling and helping keep teams on the latest and greatest technology? Or maybe you just want to help build the best home for your photos on the entire internet? </span><a href="https://www.flickr.com/jobs"><span style="font-weight:400;">We’re hiring Front End Ops</span></a><span style="font-weight:400;"> and tons of other great positions. We’d love to hear from you!</span></p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/api-2/" rel="category tag">API</a>, <a href="https://code.flickr.net/category/open-source/" rel="category tag">open source</a> </span> <span class="sep"> | </span> <span class="tag-links"> <span class="entry-utility-prep entry-utility-prep-tag-links">Tagged</span> <a href="https://code.flickr.net/tag/api/" rel="tag">api</a>, <a href="https://code.flickr.net/tag/javascript/" rel="tag">javascript</a>, <a href="https://code.flickr.net/tag/node-js/" rel="tag">node.js</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3394 --> <article id="post-3370" class="post-3370 post type-post status-publish format-standard hentry category-open-source"> <header class="entry-header"> <h1 class="entry-title"><a href="https://code.flickr.net/2016/04/05/our-justified-layout-goes-open-source/" rel="bookmark">Our Justified Layout Goes Open Source</a></h1> <div class="entry-meta"> <span class="sep">Posted on </span><a href="https://code.flickr.net/2016/04/05/our-justified-layout-goes-open-source/" title="5:01 pm" rel="bookmark"><time class="entry-date" datetime="2016-04-05T17:01:53-07:00">April 5, 2016</time></a><span class="by-author"> <span class="sep"> by </span> <span class="author vcard"><a class="url fn n" href="https://code.flickr.net/author/jimwhimpey/" title="View all posts by jimwhimpey" rel="author">jimwhimpey</a></span></span> </div><!-- .entry-meta --> </header><!-- .entry-header --> <div class="entry-content"> <p><span style="font-weight:400;">We introduced the justified layout on Flickr.com late in 2011. Our community of photographers loved it for its ability to efficiently display many photos at their native aspect ratio with visually pleasing, consistent whitespace, so we quickly added it to the rest of the website.</span></p> <p><a href="https://www.flickr.com/photos/onlywhenchased/albums/72157625187496924"><img decoding="async" loading="lazy" class="alignnone size-full wp-image-3315" src="https://cloud.githubusercontent.com/assets/43693/14259021/e4aede8e-fa59-11e5-9fca-bf86ae5bc2e1.png" alt="Justified Example" width="805" height="244" /></a></p> <p><span style="font-weight:400;">It’s been through many iterations and optimizations. From back when we were primarily on the </span><a href="http://code.flickr.net/2013/06/"><span style="font-weight:400;">PHP stack</span></a><span style="font-weight:400;"> to our lovely new JavaScript based isomorphic stack. 
Last year Eric Socolofsky did </span><a href="http://code.flickr.net/2015/03/24/much-photos/"><span style="font-weight:400;">a great job explaining how the algorithm works</span></a><span style="font-weight:400;"> and how it fits into a larger infrastructure for Flickr specifically.</span></p> <p><span style="font-weight:400;">In the years following its launch, we’ve had requests from our front end colleagues in other teams across Yahoo for a reusable package that does photo (or any rectangle) presentation like this, but it’s always been too tightly coupled to our stack to separate it out and hand it over. Until now! Today we’re publishing the </span><a href="http://flickr.github.io/justified-layout/"><span style="font-weight:400;">justified-layout algorithm wrapped in an npm module</span></a><span style="font-weight:400;"> for you to use on the server, or client, in your own projects.</span></p> <h2><b>Install/Download</b></h2> <pre><span style="font-weight:400;">npm install justified-layout --save</span></pre> <p><span style="font-weight:400;">Or grab it directly </span><a href="https://github.com/flickr/justified-layout"><span style="font-weight:400;">from Github</span></a><span style="font-weight:400;">.</span></p> <h2><b>Using it</b></h2> <p><span style="font-weight:400;">It’s really easy to use. No configuration is required. Just pass in an array of aspect ratios representing the photos/boxes you’d like to lay out:</span></p> <pre><span style="font-weight:400;">var layoutGeometry = require('justified-layout')([1.33, 1, 0.65] [, config]);</span></pre> <p><span style="font-weight:400;">If you only have dimensions and don’t want an extra step to convert them to aspect ratios, you can pass in an array of widths and heights like this:</span></p> <p><a href="https://gist.github.com/jimwhimpey/825377b78ef8d9b10e702aa6adc41eb4" rel="nofollow">https://gist.github.com/jimwhimpey/825377b78ef8d9b10e702aa6adc41eb4</a></p> <h2><b>What it returns</b></h2> <p><span style="font-weight:400;">The geometry data for the layout items, in the same order they’re passed in.</span></p> <p><a href="https://gist.github.com/jimwhimpey/faaf2c95809647abcbea481d8445ecf9" rel="nofollow">https://gist.github.com/jimwhimpey/faaf2c95809647abcbea481d8445ecf9</a></p> <p><span style="font-weight:400;">This is the extent of what the module provides. There’s no rendering component. It’s up to you to use this data to render boxes the way you want. Use absolute positioning, background positions, canvas, generate a static image on the backend, whatever you like! There’s a very basic implementation used on </span><a href="https://github.com/flickr/justified-layout/blob/gh-pages/index.html#L21-L33"><span style="font-weight:400;">the demo and docs page</span></a><span style="font-weight:400;">.</span></p> <h2><b>Configuration</b></h2> <p><span style="font-weight:400;">It’s highly likely the defaults don’t satisfy your requirements; they don’t even satisfy ours. There’s a full set of configuration options to customize the output just the way you want. My favorite is the </span><span style="font-weight:400;">fullWidthBreakoutRowCadence</span><span style="font-weight:400;"> option that we use </span><a href="https://www.flickr.com/photos/cameron_obscura/sets/72157651385352366"><span style="font-weight:400;">on album pages</span></a><span style="font-weight:400;">. 
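</span></p> <p><span style="font-weight:400;">For example, a configuration along these lines (the option names are as documented for the module; the specific values here are arbitrary) justifies rows against an 800px container and breaks out a full-width row every third row:</span></p>
<pre>
var layout = require('justified-layout');

// Arbitrary example values; see the docs page for the full option list.
var geometry = layout([0.5, 1.5, 1, 1.8, 0.4, 0.7, 0.9, 1.1, 1.7, 2, 2.1], {
  containerWidth: 800,            // width the rows are justified against
  targetRowHeight: 250,           // rows aim for this height, within a tolerance
  boxSpacing: 10,                 // px between boxes
  fullWidthBreakoutRowCadence: 3  // every 3rd row spans the full container width
});

// geometry.containerHeight -> total height needed to render the layout
// geometry.boxes           -> [{ aspectRatio, top, left, width, height }, ...]
</pre>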
<p><span style="font-weight:400;">All config options are documented on the </span><a href="http://flickr.github.io/justified-layout/#options"><span style="font-weight:400;">docs and demo page</span></a><span style="font-weight:400;">.</span></p> <h2><b>Compatibility</b></h2> <ul> <li style="font-weight:400;"><span style="font-weight:400;">Latest Chrome</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Latest Safari</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Latest Firefox</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Latest Mobile Safari</span></li> <li style="font-weight:400;"><span style="font-weight:400;">IE 9+</span></li> <li style="font-weight:400;"><span style="font-weight:400;">Node 0.10+</span></li> </ul> <h2><b>The future</b></h2> <p><span style="font-weight:400;">The justified layout algorithm is just one part of our photo list infrastructure. Following this, we’ll be open sourcing more modules for handling data, handling state, reverse layouts, appending and prepending items for pagination. </span></p> <p><span style="font-weight:400;">We welcome your feedback, issues and contributions </span><a href="https://github.com/flickr/justified-layout"><span style="font-weight:400;">on Github</span></a><span style="font-weight:400;">.</span></p> <h2><b>P.S. Open Source at Flickr</b></h2> <p><span style="font-weight:400;">This is the first of quite a bit of code we have in the works for open source release. If working on open source projects appeals to you, </span><a href="https://www.flickr.com/jobs/"><span style="font-weight:400;">we’re hiring</span></a><span style="font-weight:400;">!</span></p> </div><!-- .entry-content --> <footer class="entry-meta"> <span class="cat-links"> <span class="entry-utility-prep entry-utility-prep-cat-links">Posted in</span> <a href="https://code.flickr.net/category/open-source/" rel="category tag">open source</a> </span> </footer><!-- .entry-meta --> </article><!-- #post-3370 --> </div><!-- #content --> </div><!-- #primary --> </div><!-- #main --> </div><!-- #page --> </body> </html>