2"></path></svg><div class="dt l">Write</div></div></a></span></div></div><div class="k j i d"><div class="fi ab"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerSearchButton" href="https://medium.com/search?source=---top_nav_layout_nav----------------------------------" rel="noopener follow"><div class="bf b bg z du fj fk ab q fl fm"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24" aria-label="Search"><path fill="currentColor" fill-rule="evenodd" d="M4.092 11.06a6.95 6.95 0 1 1 13.9 0 6.95 6.95 0 0 1-13.9 0m6.95-8.05a8.05 8.05 0 1 0 5.13 14.26l3.75 3.75a.56.56 0 1 0 .79-.79l-3.73-3.73A8.05 8.05 0 0 0 11.042 3z" clip-rule="evenodd"></path></svg></div></a></div></div><div class="fi h k j"><div class="ab q"><p class="bf b dx dy dz ea eb ec ed ee ef eg du"><span><a class="bf b dx dy eh dz ea ei eb ec ej ek ee el em eg eo ep eq er es et eu ev ew ex ey ez fa fb fc fd bm fe ff" data-testid="headerSignUpButton" href="https://medium.com/m/signin?operation=register&amp;redirect=https%3A%2F%2Fblog.gopenai.com%2Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b&amp;source=post_page---top_nav_layout_nav-----------------------global_nav-----------" rel="noopener follow">Sign up</a></span></p><div class="ax l"><p class="bf b dx dy dz ea eb ec ed ee ef eg du"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerSignInButton" href="https://medium.com/m/signin?operation=login&amp;redirect=https%3A%2F%2Fblog.gopenai.com%2Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b&amp;source=post_page---top_nav_layout_nav-----------------------global_nav-----------" rel="noopener follow">Sign in</a></span></p></div></div></div><div class="l" aria-hidden="false"><button class="ay fn am ab q ao fo fp fq" aria-label="user options menu" data-testid="headerUserIcon"><div class="l fj"><img alt="" class="l fd by bz ca cx" src="https://miro.medium.com/v2/resize:fill:64:64/1*dmbNkD5D-u45r44go_cf0g.png" width="32" height="32" loading="lazy" role="presentation"/><div class="fr by l bz ca fs n ay ft"></div></div></button></div></div></div><div class="l"><div class="fu fv fw fx fy l"><div class="ab cb"><div class="ci bh fz ga gb gc"></div></div><article><div class="l"><div class="l"><span class="l"></span><section><div><div class="fs gi gj gk gl gm"></div><div class="gn go gp gq gr"><div class="ab cb"><div class="ci bh fz ga gb gc"><div><h1 id="8f29" class="pw-post-title gs gt gu bf gv gw gx gy gz ha hb hc hd he hf hg hh hi hj hk hl hm hn ho hp hq hr hs ht hu bk" data-testid="storyTitle">Microsoft GraphRAG and Ollama: Code Your Way to Smarter Question Answering</h1><div><div class="speechify-ignore ab cp"><div class="speechify-ignore bh l"><div class="hv hw hx hy hz ab"><div><div class="ab ia"><div><div class="bm" aria-hidden="false"><a href="https://medium.com/@ayoubkirouane3?source=post_page---byline--45a57cc5c38b--------------------------------" rel="noopener follow"><div class="l ib ic by id ie"><div class="l fj"><img alt="kirouane Ayoub" class="l fd by dd de cx" src="https://miro.medium.com/v2/resize:fill:88:88/1*T-KWhmfASlLM3XMvRKZnWA.jpeg" width="44" height="44" loading="lazy" data-testid="authorPhoto"/><div class="if by l dd de fs n ig ft"></div></div></div></a></div></div><div class="ih ab fj"><div><div class="bm" aria-hidden="false"><a href="https://blog.gopenai.com/?source=post_page---byline--45a57cc5c38b--------------------------------" 
rel="noopener ugc nofollow"><div class="l ii ij by id ik"><div class="l fj"><img alt="GoPenAI" class="l fd by br il cx" src="https://miro.medium.com/v2/resize:fill:48:48/1*LUSEiP1BHPkkmH75e8eg_A.png" width="24" height="24" loading="lazy" data-testid="publicationPhoto"/><div class="if by l br il fs n ig ft"></div></div></div></a></div></div></div></div></div><div class="bn bh l"><div class="ab"><div style="flex:1"><span class="bf b bg z bk"><div class="im ab q"><div class="ab q in"><div class="ab q"><div><div class="bm" aria-hidden="false"><p class="bf b io ip bk"><a class="af ag ah ai aj ak al am an ao ap aq ar iq" data-testid="authorName" href="https://medium.com/@ayoubkirouane3?source=post_page---byline--45a57cc5c38b--------------------------------" rel="noopener follow">kirouane Ayoub</a></p></div></div></div><span class="ir is" aria-hidden="true"><span class="bf b bg z du">·</span></span><p class="bf b io ip du"><span><a class="it iu ah ai aj ak al am an ao ap aq ar ex iv iw" href="https://medium.com/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fsubscribe%2Fuser%2F4751fd7878c5&amp;operation=register&amp;redirect=https%3A%2F%2Fblog.gopenai.com%2Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b&amp;user=kirouane+Ayoub&amp;userId=4751fd7878c5&amp;source=post_page-4751fd7878c5--byline--45a57cc5c38b---------------------post_header-----------" rel="noopener follow">Follow</a></span></p></div></div></span></div></div><div class="l ix"><span class="bf b bg z du"><div class="ab cn iy iz ja"><div class="jb jc ab"><div class="bf b bg z du ab jd"><span class="je l ix">Published in</span><div><div class="l" aria-hidden="false"><a class="af ag ah ai aj ak al am an ao ap aq ar iq ab q" data-testid="publicationName" href="https://blog.gopenai.com/?source=post_page---byline--45a57cc5c38b--------------------------------" rel="noopener ugc nofollow"><p class="bf b bg z jf jg jh ji jj jk jl jm bk">GoPenAI</p></a></div></div></div><div class="h k"><span class="ir is" aria-hidden="true"><span class="bf b bg z du">·</span></span></div></div><span class="bf b bg z du"><div class="ab ae"><span data-testid="storyReadTime">15 min read</span><div class="jn jo l" aria-hidden="true"><span class="l" aria-hidden="true"><span class="bf b bg z du">·</span></span></div><span data-testid="storyPublishDate">Aug 29, 2024</span></div></span></div></span></div></div></div><div class="ab cp jp jq jr js jt ju jv jw jx jy jz ka kb kc kd ke"><div class="h k w fg fh q"><div class="ku l"><div class="ab q kv kw"><div class="pw-multi-vote-icon fj je kx ky kz"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerClapButton" href="https://medium.com/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fvote%2Fgopenai%2F45a57cc5c38b&amp;operation=register&amp;redirect=https%3A%2F%2Fblog.gopenai.com%2Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b&amp;user=kirouane+Ayoub&amp;userId=4751fd7878c5&amp;source=---header_actions--45a57cc5c38b---------------------clap_footer-----------" rel="noopener follow"><div><div class="bm" aria-hidden="false"><div class="la ao lb lc ld le am lf lg lh kz"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" aria-label="clap"><path fill-rule="evenodd" d="M11.37.828 12 3.282l.63-2.454zM13.916 3.953l1.523-2.112-1.184-.39zM8.589 1.84l1.522 2.112-.337-2.501zM18.523 18.92c-.86.86-1.75 1.246-2.62 1.33a6 6 0 0 0 .407-.372c2.388-2.389 2.86-4.951 
1.399-7.623l-.912-1.603-.79-1.672c-.26-.56-.194-.98.203-1.288a.7.7 0 0 1 .546-.132c.283.046.546.231.728.5l2.363 4.157c.976 1.624 1.141 4.237-1.324 6.702m-10.999-.438L3.37 14.328a.828.828 0 0 1 .585-1.408.83.83 0 0 1 .585.242l2.158 2.157a.365.365 0 0 0 .516-.516l-2.157-2.158-1.449-1.449a.826.826 0 0 1 1.167-1.17l3.438 3.44a.363.363 0 0 0 .516 0 .364.364 0 0 0 0-.516L5.293 9.513l-.97-.97a.826.826 0 0 1 0-1.166.84.84 0 0 1 1.167 0l.97.968 3.437 3.436a.36.36 0 0 0 .517 0 .366.366 0 0 0 0-.516L6.977 7.83a.82.82 0 0 1-.241-.584.82.82 0 0 1 .824-.826c.219 0 .43.087.584.242l5.787 5.787a.366.366 0 0 0 .587-.415l-1.117-2.363c-.26-.56-.194-.98.204-1.289a.7.7 0 0 1 .546-.132c.283.046.545.232.727.501l2.193 3.86c1.302 2.38.883 4.59-1.277 6.75-1.156 1.156-2.602 1.627-4.19 1.367-1.418-.236-2.866-1.033-4.079-2.246M10.75 5.971l2.12 2.12c-.41.502-.465 1.17-.128 1.89l.22.465-3.523-3.523a.8.8 0 0 1-.097-.368c0-.22.086-.428.241-.584a.847.847 0 0 1 1.167 0m7.355 1.705c-.31-.461-.746-.758-1.23-.837a1.44 1.44 0 0 0-1.11.275c-.312.24-.505.543-.59.881a1.74 1.74 0 0 0-.906-.465 1.47 1.47 0 0 0-.82.106l-2.182-2.182a1.56 1.56 0 0 0-2.2 0 1.54 1.54 0 0 0-.396.701 1.56 1.56 0 0 0-2.21-.01 1.55 1.55 0 0 0-.416.753c-.624-.624-1.649-.624-2.237-.037a1.557 1.557 0 0 0 0 2.2c-.239.1-.501.238-.715.453a1.56 1.56 0 0 0 0 2.2l.516.515a1.556 1.556 0 0 0-.753 2.615L7.01 19c1.32 1.319 2.909 2.189 4.475 2.449q.482.08.971.08c.85 0 1.653-.198 2.393-.579.231.033.46.054.686.054 1.266 0 2.457-.52 3.505-1.567 2.763-2.763 2.552-5.734 1.439-7.586z" clip-rule="evenodd"></path></svg></div></div></div></a></span></div><div class="pw-multi-vote-count l li lj lk ll lm ln lo"><p class="bf b dv z du"><span class="lp">--</span></p></div></div></div><div><div class="bm" aria-hidden="false"><button class="ao la lq lr ab q fk ls lt" aria-label="responses"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" class="lu"><path d="M18.006 16.803c1.533-1.456 2.234-3.325 2.234-5.321C20.24 7.357 16.709 4 12.191 4S4 7.357 4 11.482c0 4.126 3.674 7.482 8.191 7.482.817 0 1.622-.111 2.393-.327.231.2.48.391.744.559 1.06.693 2.203 1.044 3.399 1.044.224-.008.4-.112.486-.287a.49.49 0 0 0-.042-.518c-.495-.67-.845-1.364-1.04-2.057a4 4 0 0 1-.125-.598zm-3.122 1.055-.067-.223-.315.096a8 8 0 0 1-2.311.338c-4.023 0-7.292-2.955-7.292-6.587 0-3.633 3.269-6.588 7.292-6.588 4.014 0 7.112 2.958 7.112 6.593 0 1.794-.608 3.469-2.027 4.72l-.195.168v.255c0 .056 0 .151.016.295.025.231.081.478.154.733.154.558.398 1.117.722 1.659a5.3 5.3 0 0 1-2.165-.845c-.276-.176-.714-.383-.941-.59z"></path></svg></button></div></div></div><div class="ab q kf kg kh ki kj kk kl km kn ko kp kq kr ks kt"><div class="lv k j i d"></div><div class="h k"><div><div class="bm" aria-hidden="false"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerBookmarkButton" href="https://medium.com/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fbookmark%2Fp%2F45a57cc5c38b&amp;operation=register&amp;redirect=https%3A%2F%2Fblog.gopenai.com%2Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b&amp;source=---header_actions--45a57cc5c38b---------------------bookmark_footer-----------" rel="noopener follow"><svg xmlns="http://www.w3.org/2000/svg" width="25" height="25" fill="none" viewBox="0 0 25 25" class="du lw" aria-label="Add to list bookmark button"><path fill="currentColor" d="M18 2.5a.5.5 0 0 1 1 0V5h2.5a.5.5 0 0 1 0 1H19v2.5a.5.5 0 1 1-1 0V6h-2.5a.5.5 0 0 1 0-1H18zM7 7a1 1 0 0 1 1-1h3.5a.5.5 0 0 0 0-1H8a2 2 0 0 0-2 
2v14a.5.5 0 0 0 .805.396L12.5 17l5.695 4.396A.5.5 0 0 0 19 21v-8.5a.5.5 0 0 0-1 0v7.485l-5.195-4.012a.5.5 0 0 0-.61 0L7 19.985z"></path></svg></a></span></div></div></div><div class="fd lx cn"><div class="l ae"><div class="ab cb"><div class="ly lz ma mb mc md ci bh"><div class="ab"><div class="bm bh" aria-hidden="false"><div><div class="bm" aria-hidden="false"><button aria-label="Listen" data-testid="audioPlayButton" class="af fk ah ai aj ak al me an ao ap ex mf mg lt mh mi mj mk ml s mm mn mo mp mq mr ms u mt mu mv"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M3 12a9 9 0 1 1 18 0 9 9 0 0 1-18 0m9-10C6.477 2 2 6.477 2 12s4.477 10 10 10 10-4.477 10-10S17.523 2 12 2m3.376 10.416-4.599 3.066a.5.5 0 0 1-.777-.416V8.934a.5.5 0 0 1 .777-.416l4.599 3.066a.5.5 0 0 1 0 .832" clip-rule="evenodd"></path></svg><div class="j i d"><p class="bf b bg z du">Listen</p></div></button></div></div></div></div></div></div></div></div><div class="bm" aria-hidden="false" aria-describedby="postFooterSocialMenu" aria-labelledby="postFooterSocialMenu"><div><div class="bm" aria-hidden="false"><button aria-controls="postFooterSocialMenu" aria-expanded="false" aria-label="Share Post" data-testid="headerSocialShareButton" class="af fk ah ai aj ak al me an ao ap ex mf mg lt mh mi mj mk ml s mm mn mo mp mq mr ms u mt mu mv"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M15.218 4.931a.4.4 0 0 1-.118.132l.012.006a.45.45 0 0 1-.292.074.5.5 0 0 1-.3-.13l-2.02-2.02v7.07c0 .28-.23.5-.5.5s-.5-.22-.5-.5v-7.04l-2 2a.45.45 0 0 1-.57.04h-.02a.4.4 0 0 1-.16-.3.4.4 0 0 1 .1-.32l2.8-2.8a.5.5 0 0 1 .7 0l2.8 2.79a.42.42 0 0 1 .068.498m-.106.138.008.004v-.01zM16 7.063h1.5a2 2 0 0 1 2 2v10a2 2 0 0 1-2 2h-11c-1.1 0-2-.9-2-2v-10a2 2 0 0 1 2-2H8a.5.5 0 0 1 .35.15.5.5 0 0 1 .15.35.5.5 0 0 1-.15.35.5.5 0 0 1-.35.15H6.4c-.5 0-.9.4-.9.9v10.2a.9.9 0 0 0 .9.9h11.2c.5 0 .9-.4.9-.9v-10.2c0-.5-.4-.9-.9-.9H16a.5.5 0 0 1 0-1" clip-rule="evenodd"></path></svg><div class="j i d"><p class="bf b bg z du">Share</p></div></button></div></div></div></div></div></div></div></div></div><figure class="mz na nb nc nd ne mw mx paragraph-image"><div role="button" tabindex="0" class="nf ng fj nh bh ni"><div class="mw mx my"><picture><source srcSet="https://miro.medium.com/v2/resize:fit:640/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 640w, https://miro.medium.com/v2/resize:fit:720/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 720w, https://miro.medium.com/v2/resize:fit:750/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 750w, https://miro.medium.com/v2/resize:fit:786/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 786w, https://miro.medium.com/v2/resize:fit:828/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 828w, https://miro.medium.com/v2/resize:fit:1100/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 1100w, https://miro.medium.com/v2/resize:fit:1400/format:webp/1*ocprwBCYr-Vt9AHAhMaZuA.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px" type="image/webp"/><source 
data-testid="og" srcSet="https://miro.medium.com/v2/resize:fit:640/1*ocprwBCYr-Vt9AHAhMaZuA.png 640w, https://miro.medium.com/v2/resize:fit:720/1*ocprwBCYr-Vt9AHAhMaZuA.png 720w, https://miro.medium.com/v2/resize:fit:750/1*ocprwBCYr-Vt9AHAhMaZuA.png 750w, https://miro.medium.com/v2/resize:fit:786/1*ocprwBCYr-Vt9AHAhMaZuA.png 786w, https://miro.medium.com/v2/resize:fit:828/1*ocprwBCYr-Vt9AHAhMaZuA.png 828w, https://miro.medium.com/v2/resize:fit:1100/1*ocprwBCYr-Vt9AHAhMaZuA.png 1100w, https://miro.medium.com/v2/resize:fit:1400/1*ocprwBCYr-Vt9AHAhMaZuA.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px"/><img alt="" class="bh md nj c" width="700" height="361" loading="eager" role="presentation"/></picture></div></div></figure><p id="fcd5" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">Traditional RAG methods, which primarily rely on semantic similarity search, often fall short when faced with complex questions that require connecting disparate pieces of information or understanding the broader context of a large dataset. Enter GraphRAG, a novel approach that leverages the power of knowledge graphs to overcome these limitations and enhance the capabilities of RAG systems.</p><h1 id="ef20" class="oi oj gu bf ok ol om on oo op oq or os ot ou ov ow ox oy oz pa pb pc pd pe pf bk">Understanding the Problem with Baseline RAG</h1><p id="ebe2" class="pw-post-body-paragraph nk nl gu nm b nn pg np nq nr ph nt nu nv pi nx ny nz pj ob oc od pk of og oh gn bk">Baseline RAG systems, while useful for simple question answering, struggle when tasked with synthesizing information from various sources or understanding the overarching themes within a dataset. For example, if you ask a baseline RAG system “What are the main causes of climate change according to this research dataset?”, it might struggle to provide a comprehensive answer because it lacks the ability to connect the different pieces of information related to climate change scattered throughout the dataset. This highlights the need for a more structured and intelligent approach to RAG.</p><h1 id="f4c6" class="oi oj gu bf ok ol om on oo op oq or os ot ou ov ow ox oy oz pa pb pc pd pe pf bk">A Knowledge Graph-Powered Solution</h1><p id="ecbf" class="pw-post-body-paragraph nk nl gu nm b nn pg np nq nr ph nt nu nv pi nx ny nz pj ob oc od pk of og oh gn bk"><a class="af pl" href="https://arxiv.org/pdf/2404.16130" rel="noopener ugc nofollow" target="_blank">GraphRAG</a> addresses this need by utilizing LLMs to extract a knowledge graph from the raw text data. This knowledge graph represents the information as a network of interconnected entities and relationships, providing a richer representation of the data compared to simple text snippets. 
GraphRAG Architecture

GraphRAG's architecture consists of several key components. The GraphRAG Knowledge Model defines a standardized data model for representing items such as documents, TextUnits, entities, relationships, and community reports. DataShaper Workflows, built on the DataShaper library, enable declarative data processing, making the pipeline flexible and customizable. LLM-Based Workflow Steps integrate LLMs into the indexing process, using custom verbs to perform tasks like entity extraction and summarization.

Deep Dive into the GraphRAG Process

The GraphRAG process involves two main stages: indexing and querying.

Indexing:

During indexing, the input text is divided into manageable chunks called TextUnits. LLMs then extract entities, relationships, and claims from these TextUnits, forming the knowledge graph.
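To illustrate the chunking step, here is a rough sketch of a TextUnit splitter. It is not the library's implementation (GraphRAG chunks by LLM tokens, with sizes set in its configuration); the word-based splitting and the numbers below are assumptions made for the example.

def make_text_units(text: str, size: int = 300, overlap: int = 50) -> list[str]:
    """Split a document into overlapping chunks ("TextUnits").

    GraphRAG chunks by LLM tokens; plain words are used here to keep the
    sketch dependency-free, and the sizes are illustrative only.
    """
    words = text.split()
    step = size - overlap
    return [" ".join(words[i:i + size]) for i in range(0, len(words), step)]

# Each TextUnit would then be sent to the LLM with an extraction prompt
# asking for entities, relationships, and claims.
sample = "GraphRAG builds a knowledge graph from raw text. " * 200
print(len(make_text_units(sample)), "TextUnits")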
Furthermore, a process called community detection identifies clusters of related entities, and summaries are generated for each community, providing high-level overviews of different topics within the dataset.
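GraphRAG performs this clustering with the (hierarchical) Leiden algorithm. The sketch below substitutes networkx's greedy modularity clustering and a toy entity graph simply to show the shape of the step; it is not the library's code.

import networkx as nx
from networkx.algorithms.community import greedy_modularity_communities

# Toy entity graph; edges stand in for LLM-extracted relationships.
graph = nx.Graph()
graph.add_edges_from([
    ("CO2", "global warming"), ("CO2", "fossil fuels"),
    ("fossil fuels", "power plants"),
    ("deforestation", "land use"), ("land use", "agriculture"),
])

for i, members in enumerate(greedy_modularity_communities(graph)):
    # In GraphRAG, an LLM would now write a "community report" that
    # summarizes the entities and relationships inside this cluster.
    print(f"community {i}: {sorted(members)}")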
Querying:

When a user submits a query, GraphRAG leverages the knowledge graph to retrieve relevant information. It offers two main search methods: Local Search and Global Search.

Local Search focuses on answering questions about specific entities, exploring their relationships, associated claims, and relevant text snippets.

Global Search, on the other hand, tackles broader questions that require understanding the entire dataset. It analyzes the community summaries to identify overarching themes and synthesize information from across the dataset.
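The two retrieval modes can be pictured roughly as follows. This is a deliberately simplified sketch, not the library's implementation: ask_llm, text_units_by_entity, and community_summaries are placeholders you would have to supply yourself.

def local_search_context(graph, text_units_by_entity, entity: str) -> str:
    """Gather one entity's neighborhood: its relationships plus the TextUnits
    that mention it (a simplified stand-in for Local Search)."""
    lines = []
    for neighbor in graph.neighbors(entity):
        relation = graph.edges[entity, neighbor].get("relation", "related to")
        lines.append(f"{entity} {relation} {neighbor}")
    lines.extend(text_units_by_entity.get(entity, []))
    return "\n".join(lines)


def global_search(community_summaries, question: str, ask_llm) -> str:
    """Map-reduce over community summaries (a simplified stand-in for Global
    Search): answer the question per community, then combine the answers."""
    partial_answers = [
        ask_llm(f"Community summary:\n{summary}\n\nQuestion: {question}")
        for summary in community_summaries
    ]
    return ask_llm("Combine these partial answers:\n" + "\n".join(partial_answers))

Both functions simply assemble text that would be handed to the LLM as context for producing the final answer.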

# Benefits and Advantages of GraphRAG

By utilizing a knowledge graph, GraphRAG offers significant advantages over baseline RAG. It enhances reasoning by enabling the system to connect disparate pieces of information and synthesize new insights. It provides a holistic understanding of the dataset by organizing information into meaningful clusters and providing summaries for each cluster. Moreover, it improves overall RAG performance, particularly in complex question-answering scenarios.

# Applications and Use Cases

The applications of GraphRAG are vast and span various domains. In research, it can help answer complex questions by synthesizing information from large datasets of scientific papers. In enterprise settings, it can power conversational AI systems that reason about specific domains, such as customer support or internal knowledge bases. Furthermore, GraphRAG can be used to build knowledge-exploration tools that let users interactively explore the relationships between concepts in large datasets and discover new insights.

# Code Example

We begin by installing Ollama, a tool for running large language models locally, and starting its server.

```bash
curl -fsSL https://ollama.com/install.sh | sh
```

Start the server:

```bash
ollama serve
```

We then download the `llama3.1` model for text generation and `bge-large` for embeddings.

```bash
ollama pull llama3.1
ollama pull bge-large
```

Alternatively, you can run Ollama with Docker:

```bash
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
docker exec -it ollama ollama pull llama3.1
docker exec -it ollama ollama pull bge-large
```
llama3.1<br/>docker exec -it ollama ollama pull bge-large</span></pre><p id="cc02" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">Next, we install the GraphRAG library itself.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="c27d" class="py oj gu pv b bg pz qa l qb qc">pip install graphrag</span></pre><p id="0b6b" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">We create a directory for our project and a subdirectory for input data.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="cde3" class="py oj gu pv b bg pz qa l qb qc">mkdir -p ./rag_graph/input</span></pre><p id="dedd" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">A sample story is saved as a text file within the input folder.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="e036" class="py oj gu pv b bg pz qa l qb qc">text = &quot;&quot;&quot;<br/>In the city of Novus, a renowned architect named Alice Johnson was busy working on her latest project. Alice had been designing buildings for over 15 years and was well-known for her collaboration with her mentor, Robert Lee, who was also a famous architect. Robert had taught Alice everything she knew, and they remained close friends.<br/><br/>Alice was married to David Johnson, a software engineer who worked at TechCorp. David was passionate about his work and often collaborated with his colleague, Emily Smith, a data scientist at TechCorp. Emily was also Alice’s best friend from college, where they studied together. She frequently visited Alice and David’s home, and they often discussed their work over dinner.<br/><br/>Alice and David had a daughter, Sophie Johnson, who was 8 years old and loved spending time with her grandparents, John and Mary Johnson. John was David’s father, a retired professor, and Mary was a retired nurse. They lived in a neighboring town called Greenville and visited their family in Novus every weekend.<br/><br/>One day, Alice received an invitation from the Novus City Council to present her latest building design. She was excited to showcase her work and immediately contacted Robert Lee to review her plans. Robert was delighted to help, as he had always admired Alice’s talent. Meanwhile, David was busy at TechCorp, where he and Emily were working on a new AI project under the supervision of their manager, Michael Brown.<br/><br/>As the day of the presentation approached, Alice prepared her designs with Robert’s guidance. David and Sophie also attended the event to support Alice. The Novus City Council was impressed with her work and decided to approve the project, marking another success for Alice. 

GraphRAG is then initialized within the project directory, creating the necessary configuration files.

```bash
python -m graphrag.index --init --root ./rag_graph
```

The `.env` file is edited to include any required API keys. Since Ollama's local endpoint does not check the key, a placeholder value is enough: `GRAPHRAG_API_KEY=EMPTY`
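
If you prefer to do this from Python rather than a text editor, writing the placeholder key looks like the following (the `.env` file lives at the project root created by the init step above):

```python
# Write a placeholder key into the project's .env file.
# Ollama's OpenAI-compatible endpoint does not validate the key, so any value works.
with open("./rag_graph/.env", "w") as f:
    f.write("GRAPHRAG_API_KEY=EMPTY\n")
```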

The `settings.yaml` file is modified to point at the models downloaded earlier (`llama3.1` and `bge-large`), their local server endpoint, and other parameters such as maximum tokens and concurrency. These settings configure GraphRAG to use Ollama for both text generation and embeddings.

**LLM:**

- model: llama3.1
- max_tokens: 2000
- api_base: http://127.0.0.1:11434/v1 (the Ollama server endpoint)
- max_retries: 1
- concurrent_requests: 1
- comment out the line `model_supports_json: true`

**Embeddings:**

- model: bge-large:latest
- api_base: http://127.0.0.1:11434/v1
- max_retries: 1
- concurrent_requests: 1
- batch_size: 1
- batch_max_tokens: 8191

## `settings.yaml`

```yaml
encoding_model: cl100k_base
skip_workflows: []
llm:
  api_key: ${GRAPHRAG_API_KEY}
  type: openai_chat # or azure_openai_chat
  model: llama3.1
  # model_supports_json: true # recommended if this is available for your model.
  max_tokens: 2000
  # request_timeout: 180.0
  api_base: http://127.0.0.1:11434/v1
  # api_version: 2024-02-15-preview
  # organization: <organization_id>
  # deployment_name: <azure_model_deployment_name>
  # tokens_per_minute: 150_000 # set a leaky bucket throttle
  # requests_per_minute: 10_000 # set a leaky bucket throttle
  max_retries: 1
  # max_retry_wait: 10.0
  # sleep_on_rate_limit_recommendation: true # whether to sleep when azure suggests wait-times
  concurrent_requests: 1 # the number of parallel inflight requests that may be made

parallelization:
  stagger: 0.3
  # num_threads: 50 # the number of threads to use for parallel processing

async_mode: threaded # or asyncio

embeddings:
  ## parallelization: override the global parallelization settings for embeddings
  async_mode: threaded # or asyncio
  llm:
    api_key: ${GRAPHRAG_API_KEY}
    type: openai_embedding # or azure_openai_embedding
    model: bge-large:latest
    api_base: http://127.0.0.1:11434/v1
    # api_version: 2024-02-15-preview
    # organization: <organization_id>
    # deployment_name: <azure_model_deployment_name>
    # tokens_per_minute: 150_000 # set a leaky bucket throttle
    # requests_per_minute: 10_000 # set a leaky bucket throttle
    max_retries: 1
    # max_retry_wait: 10.0
    # sleep_on_rate_limit_recommendation: true # whether to sleep when azure suggests wait-times
    concurrent_requests: 1 # the number of parallel inflight requests that may be made
    batch_size: 1 # the number of documents to send in a single request
    batch_max_tokens: 8191 # the maximum number of tokens to send in a single request
    # target: required # or optional

chunks:
  size: 300
  overlap: 100
  group_by_columns: [id] # by default, we don't allow chunks to cross documents

input:
  type: file # or blob
  file_type: text # or csv
  base_dir: "input"
  file_encoding: utf-8
  file_pattern: ".*\\.txt$"

cache:
  type: file # or blob
  base_dir: "cache"
  # connection_string: <azure_blob_storage_connection_string>
  # container_name: <azure_blob_storage_container_name>

storage:
  type: file # or blob
  base_dir: "output/${timestamp}/artifacts"
  # connection_string: <azure_blob_storage_connection_string>
  # container_name: <azure_blob_storage_container_name>

reporting:
  type: file # or console, blob
  base_dir: "output/${timestamp}/reports"
  # connection_string: <azure_blob_storage_connection_string>
  # container_name: <azure_blob_storage_container_name>

entity_extraction:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  prompt: "prompts/entity_extraction.txt"
  entity_types: [organization,person,geo,event]
  max_gleanings: 0

summarize_descriptions:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  prompt: "prompts/summarize_descriptions.txt"
  max_length: 500

claim_extraction:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  # enabled: true
  prompt: "prompts/claim_extraction.txt"
  description: "Any claims or facts that could be relevant to information discovery."
  max_gleanings: 0

community_report:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  prompt: "prompts/community_report.txt"
  max_length: 2000
  max_input_length: 7000

cluster_graph:
  max_cluster_size: 10

embed_graph:
  enabled: false # if true, will generate node2vec embeddings for nodes
  # num_walks: 10
  # walk_length: 40
  # window_size: 2
  # iterations: 3
  # random_seed: 597832

umap:
  enabled: false # if true, will generate UMAP embeddings for nodes

snapshots:
  graphml: false
  raw_entities: false
  top_level_nodes: false

local_search:
  # text_unit_prop: 0.5
  # community_prop: 0.1
  # conversation_history_max_turns: 5
  # top_k_mapped_entities: 10
  # top_k_relationships: 10
  # max_tokens: 12000

global_search:
  # max_tokens: 12000
  # data_max_tokens: 12000
  # map_max_tokens: 1000
  # reduce_max_tokens: 2000
  # concurrency: 32
```
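
As a quick, optional sanity check (this uses PyYAML, which is not part of the GraphRAG setup above), you can parse the file and confirm it points at the two local models:

```python
# Optional: parse settings.yaml and confirm the model names (requires PyYAML).
import yaml

with open("./rag_graph/settings.yaml") as f:
    settings = yaml.safe_load(f)

print(settings["llm"]["model"])                # expected: llama3.1
print(settings["embeddings"]["llm"]["model"])  # expected: bge-large:latest
```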
&lt;azure_blob_storage_container_name&gt;<br/><br/>entity_extraction:<br/> ## llm: override the global llm settings for this task<br/> ## parallelization: override the global parallelization settings for this task<br/> ## async_mode: override the global async_mode settings for this task<br/> prompt: &quot;prompts/entity_extraction.txt&quot;<br/> entity_types: [organization,person,geo,event]<br/> max_gleanings: 0<br/><br/>summarize_descriptions:<br/> ## llm: override the global llm settings for this task<br/> ## parallelization: override the global parallelization settings for this task<br/> ## async_mode: override the global async_mode settings for this task<br/> prompt: &quot;prompts/summarize_descriptions.txt&quot;<br/> max_length: 500<br/><br/>claim_extraction:<br/> ## llm: override the global llm settings for this task<br/> ## parallelization: override the global parallelization settings for this task<br/> ## async_mode: override the global async_mode settings for this task<br/> # enabled: true<br/> prompt: &quot;prompts/claim_extraction.txt&quot;<br/> description: &quot;Any claims or facts that could be relevant to information discovery.&quot;<br/> max_gleanings: 0<br/><br/>community_report:<br/> ## llm: override the global llm settings for this task<br/> ## parallelization: override the global parallelization settings for this task<br/> ## async_mode: override the global async_mode settings for this task<br/> prompt: &quot;prompts/community_report.txt&quot;<br/> max_length: 2000<br/> max_input_length: 7000<br/><br/>cluster_graph:<br/> max_cluster_size: 10<br/><br/>embed_graph:<br/> enabled: false # if true, will generate node2vec embeddings for nodes<br/> # num_walks: 10<br/> # walk_length: 40<br/> # window_size: 2<br/> # iterations: 3<br/> # random_seed: 597832<br/><br/>umap:<br/> enabled: false # if true, will generate UMAP embeddings for nodes<br/><br/>snapshots:<br/> graphml: false<br/> raw_entities: false<br/> top_level_nodes: false<br/><br/>local_search:<br/> # text_unit_prop: 0.5<br/> # community_prop: 0.1<br/> # conversation_history_max_turns: 5<br/> # top_k_mapped_entities: 10<br/> # top_k_relationships: 10<br/> # max_tokens: 12000<br/><br/>global_search:<br/> # max_tokens: 12000<br/> # data_max_tokens: 12000<br/> # map_max_tokens: 1000<br/> # reduce_max_tokens: 2000<br/> # concurrency: 32</span></pre><p id="7f1e" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk"><strong class="nm gv">Running the Indexing pipeline :</strong></p><p id="00bf" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">The GraphRAG indexing pipeline is executed, processing the story text. This involves creating TextUnits, extracting entities and relationships, building a community hierarchy, and generating summaries. The resulting knowledge graph and related data are stored in the specified output directory.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="091d" class="py oj gu pv b bg pz qa l qb qc">python -m graphrag.index --root ./rag_graph</span></pre><p id="07ad" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">Finally, we can query the indexed data using either Global or Local Search. Global Search is used to ask questions about the overall story (“What are the top themes…?”), while Local Search is suitable for questions about specific entities (“Who is Scrooge…?”). 

Finally, we can query the indexed data using either Global or Local Search. Global Search is used to ask questions about the overall story ("What are the top themes…?"), while Local Search is suitable for questions about specific entities ("Who is Alice Johnson…?"). The chosen search method retrieves relevant information from the knowledge graph and generates a response based on the query.

**Global search using the CLI:**

```bash
python -m graphrag.query --root ./rag_graph --method global "What are the top themes in this story?"
```

**Local search using the CLI:**

```bash
python -m graphrag.query --root ./rag_graph --method local "Who is Alice Johnson, and what are her main relationships?"
```

# Global Search Example

The Global Search method generates answers by searching over all AI-generated community reports in a map-reduce fashion. This is a resource-intensive method, but it often gives good responses for questions that require an understanding of the dataset as a whole (e.g., "What are the top themes in this story?").

**1. Importing Dependencies and Setting up the LLM:**

We begin by importing the necessary libraries, including `pandas` for data manipulation, `tiktoken` for tokenization, and components from the `graphrag` library for query execution.

```python
import os

import pandas as pd
import tiktoken

from graphrag.query.indexer_adapters import read_indexer_entities, read_indexer_reports
from graphrag.query.llm.oai.chat_openai import ChatOpenAI
from graphrag.query.llm.oai.typing import OpenaiApiType
from graphrag.query.structured_search.global_search.community_context import (
    GlobalCommunityContext,
)
from graphrag.query.structured_search.global_search.search import GlobalSearch
```

We then initialize an instance of `ChatOpenAI`, configuring it to use the `llama3.1` model hosted locally via **Ollama**.

```python
api_key = "EMPTY"
llm_model = "llama3.1"

llm = ChatOpenAI(
    api_base="http://127.0.0.1:11434/v1",
    api_key=api_key,
    model=llm_model,
    api_type=OpenaiApiType.OpenAI,
    max_retries=20,
)

token_encoder = tiktoken.get_encoding("cl100k_base")
```
tiktoken.get_encoding(&quot;cl100k_base&quot;)</span></pre><p id="ce3d" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">A basic test ensures the LLM endpoint is functioning correctly.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="9537" class="py oj gu pv b bg pz qa l qb qc">messages = [<br/> {<br/> &quot;role&quot;: &quot;user&quot;,<br/> &quot;content&quot;: &quot;Hi&quot;<br/> }<br/>]<br/>response = llm.generate(messages=messages)<br/>print(response)</span></pre><p id="974c" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk"><strong class="nm gv">2. Loading Data and Building the Context:</strong></p><p id="cd76" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">Next, we load the community reports and entity data generated during the indexing phase. These reports, organized hierarchically, represent different aspects of the dataset. We specify <code class="cx qd qe qf pv b">COMMUNITY_LEVEL</code> to determine the granularity of the reports used.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="60fb" class="py oj gu pv b bg pz qa l qb qc"># parquet files generated from indexing pipeline<br/>INPUT_DIR = &quot;./output/run-id&quot; # replace the run-id with the created one<br/>COMMUNITY_REPORT_TABLE = &quot;artifacts/create_final_community_reports&quot;<br/>ENTITY_TABLE = &quot;artifacts/create_final_nodes&quot;<br/>ENTITY_EMBEDDING_TABLE = &quot;artifacts/create_final_entities&quot;<br/><br/># community level in the Leiden community hierarchy from which we will load the community reports<br/># higher value means we use reports from more fine-grained communities (at the cost of higher computation cost)<br/>COMMUNITY_LEVEL = 2<br/><br/>entity_df = pd.read_parquet(f&quot;{INPUT_DIR}/{ENTITY_TABLE}.parquet&quot;)<br/>report_df = pd.read_parquet(f&quot;{INPUT_DIR}/{COMMUNITY_REPORT_TABLE}.parquet&quot;)<br/>entity_embedding_df = pd.read_parquet(f&quot;{INPUT_DIR}/{ENTITY_EMBEDDING_TABLE}.parquet&quot;)<br/><br/>reports = read_indexer_reports(report_df, entity_df, COMMUNITY_LEVEL)<br/>entities = read_indexer_entities(entity_df, entity_embedding_df, COMMUNITY_LEVEL)<br/><br/><br/># Build global context based on community reports<br/><br/>context_builder = GlobalCommunityContext(<br/> community_reports=reports,<br/> entities=entities, # default to None if you don&#x27;t want to use community weights for ranking<br/> token_encoder=token_encoder,<br/>)</span></pre><p id="0f08" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">The <code class="cx qd qe qf pv b">GlobalCommunityContext</code> is then initialized, responsible for selecting and formatting relevant community reports as context for the LLM.</p><p id="f19b" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk"><strong class="nm gv">3. Configuring Global Search:</strong></p><p id="cd64" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">We define parameters for the context builder, map stage, and reduce stage of Global Search. 

We define parameters for the context builder, the map stage, and the reduce stage of Global Search. These parameters control aspects like context size, shuffling of reports, inclusion of community rankings and weights, and the LLM parameters for each stage.

```python
context_builder_params = {
    "use_community_summary": False,  # False means using full community reports. True means using community short summaries.
    "shuffle_data": True,
    "include_community_rank": True,
    "min_community_rank": 0,
    "community_rank_name": "rank",
    "include_community_weight": True,
    "community_weight_name": "occurrence weight",
    "normalize_community_weight": True,
    "max_tokens": 1000,  # change this based on the token limit you have on your model (if you are using a model with an 8k limit, a good setting could be 5000)
    "context_name": "Reports",
}

map_llm_params = {
    "max_tokens": 1000,
    "temperature": 0.0,
    "response_format": {"type": "json_object"},
}

reduce_llm_params = {
    "max_tokens": 2000,  # change this based on the token limit you have on your model (if you are using a model with an 8k limit, a good setting could be 1000-1500)
    "temperature": 0.0,
}

search_engine = GlobalSearch(
    llm=llm,
    context_builder=context_builder,
    token_encoder=token_encoder,
    max_data_tokens=1000,  # change this based on the token limit you have on your model (if you are using a model with an 8k limit, a good setting could be 5000)
    map_llm_params=map_llm_params,
    reduce_llm_params=reduce_llm_params,
    allow_general_knowledge=False,  # setting this to True adds an instruction encouraging the LLM to incorporate general knowledge, which may increase hallucinations but could be useful in some use cases.
    json_mode=True,  # set this to False if your LLM model does not support JSON mode.
    context_builder_params=context_builder_params,
    concurrent_coroutines=32,
    response_type="multiple paragraphs",  # free-form text describing the response type and format, e.g. prioritized list, single paragraph, multiple paragraphs, multiple-page report
)
```

We then create the `GlobalSearch` engine, passing in the LLM instance, context builder, tokenizer, and other configuration parameters.

**4. Performing Global Search:**

We execute the Global Search using the `asearch` method, providing the query "Who has collaborated with Alice Johnson on any project?".

```python
result = await search_engine.asearch(
    "Who has collaborated with Alice Johnson on any project?"
)
print(result.response)
```

The search engine retrieves relevant community reports, extracts key points from each, aggregates them, and generates a final response based on the aggregated information.

Print the number of LLM calls and tokens:

```python
print(f"LLM calls: {result.llm_calls}. LLM tokens: {result.prompt_tokens}")
```
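
One caveat: `asearch` is a coroutine, so the bare `await` above assumes you are running the code in a Jupyter notebook (or another environment with a running event loop). In a plain Python script you would wrap the call with `asyncio.run`, roughly like this:

```python
import asyncio

async def main():
    # Same query as above, wrapped in a coroutine so it can run outside a notebook.
    result = await search_engine.asearch(
        "Who has collaborated with Alice Johnson on any project?"
    )
    print(result.response)
    print(f"LLM calls: {result.llm_calls}. LLM tokens: {result.prompt_tokens}")

asyncio.run(main())
```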

# Local Search Example

**1. Importing Dependencies and Setting up the Environment:**

We begin by importing the necessary libraries, including components from the `graphrag` library for handling various aspects of the query process. This sets the stage for Local Search, which is ideal for questions requiring detailed information about particular entities.

```python
import os

import pandas as pd
import tiktoken

from graphrag.query.context_builder.entity_extraction import EntityVectorStoreKey
from graphrag.query.indexer_adapters import (
    read_indexer_covariates,
    read_indexer_entities,
    read_indexer_relationships,
    read_indexer_reports,
    read_indexer_text_units,
)
from graphrag.query.input.loaders.dfs import (
    store_entity_semantic_embeddings,
)
from graphrag.query.llm.oai.chat_openai import ChatOpenAI
from graphrag.query.llm.oai.embedding import OpenAIEmbedding
from graphrag.query.llm.oai.typing import OpenaiApiType
from graphrag.query.question_gen.local_gen import LocalQuestionGen
from graphrag.query.structured_search.local_search.mixed_context import (
    LocalSearchMixedContext,
)
from graphrag.query.structured_search.local_search.search import LocalSearch
from graphrag.vector_stores.lancedb import LanceDBVectorStore
```

**2. Loading Data and Building the Context:**

We load data from the indexing pipeline's output, including entities, relationships, community reports, and text units. These data sources provide a comprehensive view of the indexed information.

```python
# Load tables to dataframes
INPUT_DIR = "./output/run-id"  # replace the run-id with the created one

LANCEDB_URI = f"{INPUT_DIR}/lancedb"

COMMUNITY_REPORT_TABLE = "artifacts/create_final_community_reports"
ENTITY_TABLE = "artifacts/create_final_nodes"
ENTITY_EMBEDDING_TABLE = "artifacts/create_final_entities"
RELATIONSHIP_TABLE = "artifacts/create_final_relationships"
COVARIATE_TABLE = "artifacts/create_final_covariates"
TEXT_UNIT_TABLE = "artifacts/create_final_text_units"
COMMUNITY_LEVEL = 2


# Read entities

# read nodes table to get community and degree data
entity_df = pd.read_parquet(f"{INPUT_DIR}/{ENTITY_TABLE}.parquet")
entity_embedding_df = pd.read_parquet(f"{INPUT_DIR}/{ENTITY_EMBEDDING_TABLE}.parquet")

entities = read_indexer_entities(entity_df, entity_embedding_df, COMMUNITY_LEVEL)

# load description embeddings to an in-memory lancedb vectorstore
# to connect to a remote db, specify url and port values.
description_embedding_store = LanceDBVectorStore(
    collection_name="entity_description_embeddings",
)
description_embedding_store.connect(db_uri=LANCEDB_URI)
entity_description_embeddings = store_entity_semantic_embeddings(
    entities=entities, vectorstore=description_embedding_store
)

# Read relationships
relationship_df = pd.read_parquet(f"{INPUT_DIR}/{RELATIONSHIP_TABLE}.parquet")
relationships = read_indexer_relationships(relationship_df)

# NOTE: covariates are turned off by default, because they generally need prompt tuning to be valuable
# Please see the GRAPHRAG_CLAIM_* settings
# covariate_df = pd.read_parquet(f"{INPUT_DIR}/{COVARIATE_TABLE}.parquet")
# claims = read_indexer_covariates(covariate_df)

report_df = pd.read_parquet(f"{INPUT_DIR}/{COMMUNITY_REPORT_TABLE}.parquet")
reports = read_indexer_reports(report_df, entity_df, COMMUNITY_LEVEL)

# Read text units
text_unit_df = pd.read_parquet(f"{INPUT_DIR}/{TEXT_UNIT_TABLE}.parquet")
text_units = read_indexer_text_units(text_unit_df)
```

We leverage LanceDB, a vector database, to store and efficiently retrieve entity embeddings, which are crucial for identifying entities related to the user's query.

**3. Configuring the Embedding Model:**

We initialize an `OpenAIEmbedding` instance, configuring it to use the `bge-large` model hosted locally via Ollama. This model will be used to generate embeddings for text, allowing us to find semantically similar entities and text units during the search process.

```python
embedding_model = "bge-large:latest"
text_embedder = OpenAIEmbedding(
    api_key=api_key,
    api_base="http://127.0.0.1:11434/v1",
    api_type=OpenaiApiType.OpenAI,
    model=embedding_model,
    deployment_name=embedding_model,
    max_retries=20,
)
```

**4. Creating the Local Search Context Builder:**

We create a `LocalSearchMixedContext` instance, providing it with access to the loaded data (entities, relationships, reports, text units) and the embedding model. This context builder is responsible for selecting and formatting relevant information from these sources based on the user's query.

```python
context_builder = LocalSearchMixedContext(
    community_reports=reports,
    text_units=text_units,
    entities=entities,
    relationships=relationships,
    # if you did not run covariates during indexing, set this to None
    covariates=None,
    entity_text_embeddings=description_embedding_store,
    embedding_vectorstore_key=EntityVectorStoreKey.ID,  # if the vectorstore uses entity title as ids, set this to EntityVectorStoreKey.TITLE
    text_embedder=text_embedder,
    token_encoder=token_encoder,
)
```

**5. Creating the Local Search Engine:**

We initialize the `LocalSearch` engine, providing the LLM, context builder, tokenizer, and specific parameters that control how the search is performed. These parameters determine factors like the proportion of the context window dedicated to different data types (e.g., text units vs. community reports), the number of related entities to retrieve, and the maximum context window size.

- `text_unit_prop`: proportion of the context window dedicated to related text units.
- `community_prop`: proportion of the context window dedicated to community reports. The remaining proportion is dedicated to entities and relationships. The sum of `text_unit_prop` and `community_prop` should be <= 1.
- `conversation_history_max_turns`: maximum number of turns to include in the conversation history.
- `conversation_history_user_turns_only`: if True, only include user queries in the conversation history.
- `top_k_mapped_entities`: number of related entities to retrieve from the entity description embedding store.
- `top_k_relationships`: controls the number of out-of-network relationships to pull into the context window.
- `include_entity_rank`: if True, include the entity rank from the entity table in the context window. The default entity rank is node degree.
- `include_relationship_weight`: if True, include the relationship weight in the context window.
- `include_community_rank`: if True, include the community rank in the context window.
- `return_candidate_context`: if True, return a set of dataframes containing all candidate entity/relationship/covariate records that could be relevant. Note that not all of these records will be included in the context window. The "in_context" column in these dataframes indicates whether the record is included in the context window.
- `max_tokens`: maximum number of tokens to use for the context window.
The &quot;in_context&quot; column in these dataframes indicates whether the record is included in the context window.</li><li id="1e82" class="nk nl gu nm b nn qj np nq nr qk nt nu nv ql nx ny nz qm ob oc od qn of og oh qg qh qi bk"><code class="cx qd qe qf pv b">max_tokens</code>: maximum number of tokens to use for the context window.</li></ul><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="894b" class="py oj gu pv b bg pz qa l qb qc">local_context_params = {<br/> &quot;text_unit_prop&quot;: 0.5,<br/> &quot;community_prop&quot;: 0.1,<br/> &quot;conversation_history_max_turns&quot;: 5,<br/> &quot;conversation_history_user_turns_only&quot;: True,<br/> &quot;top_k_mapped_entities&quot;: 10,<br/> &quot;top_k_relationships&quot;: 10,<br/> &quot;include_entity_rank&quot;: True,<br/> &quot;include_relationship_weight&quot;: True,<br/> &quot;include_community_rank&quot;: False,<br/> &quot;return_candidate_context&quot;: False,<br/> &quot;embedding_vectorstore_key&quot;: EntityVectorStoreKey.ID, # set this to EntityVectorStoreKey.TITLE if the vectorstore uses entity title as ids<br/> &quot;max_tokens&quot;: 12_000, # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)<br/>}<br/><br/>llm_params = {<br/> &quot;max_tokens&quot;: 2_000, # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 1000=1500)<br/> &quot;temperature&quot;: 0.0,<br/>}<br/><br/>search_engine = LocalSearch(<br/> llm=llm,<br/> context_builder=context_builder,<br/> token_encoder=token_encoder,<br/> llm_params=llm_params,<br/> context_builder_params=local_context_params,<br/> response_type=&quot;multiple paragraphs&quot;, # free form text describing the response type and format, can be anything, e.g. prioritized list, single paragraph, multiple paragraphs, multiple-page report<br/>)</span></pre><p id="abe4" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk"><strong class="nm gv">6. Running Local Search:</strong></p><p id="4ad2" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">Finally, we execute the Local Search using the <code class="cx qd qe qf pv b">asearch</code> method, providing the query &quot;Tell me about Alice Johnson&quot;. The search engine identifies relevant entities (Alice Johnson in this case), retrieves related information from the various data sources, and generates a comprehensive response based on the combined context.</p><pre class="pn po pp pq pr pu pv pw bp px bb bk"><span id="aa3f" class="py oj gu pv b bg pz qa l qb qc">question = &quot;Tell me about Alice Johnson&quot;<br/>result = await search_engine.asearch(question)<br/>print(result.response)</span></pre></div></div></div><div class="ab cb rd re rf rg" role="separator"><span class="rh by bm ri rj rk"></span><span class="rh by bm ri rj rk"></span><span class="rh by bm ri rj"></span></div><div class="gn go gp gq gr"><div class="ab cb"><div class="ci bh fz ga gb gc"><p id="5627" class="pw-post-body-paragraph nk nl gu nm b nn no np nq nr ns nt nu nv nw nx ny nz oa ob oc od oe of og oh gn bk">GraphRAG represents a significant advancement in the field of Retrieval Augmented Generation. 

GraphRAG represents a significant advancement in the field of Retrieval Augmented Generation. By leveraging knowledge graphs, it overcomes the limitations of traditional RAG methods and empowers LLMs to reason more effectively, understand complex datasets holistically, and provide more accurate and insightful answers to a wide range of questions. As research and development in this area continue, we can expect GraphRAG to play an increasingly important role in shaping the future of AI-powered knowledge retrieval and exploration.

**My LinkedIn**: https://www.linkedin.com/in/ayoub-kirouane3

**My HuggingFace**: https://huggingface.co/ayoubkirouane
worker","group":"disabled","tags":["group-edgeCachePosts","post-45a57cc5c38b","user-4751fd7878c5","collection-7adf3c3694ff"],"serverVariantState":"","middlewareEnabled":false,"cacheStatus":"DYNAMIC","shouldUseCache":false,"vary":[],"lohpSummerUpsellEnabled":false,"publicationHierarchyEnabledWeb":false,"postBottomResponsesEnabled":false},"client":{"hydrated":false,"isUs":false,"isNativeMedium":false,"isSafariMobile":false,"isSafari":false,"isFirefox":false,"routingEntity":{"type":"COLLECTION","id":"7adf3c3694ff","explicit":true},"viewerIsBot":false},"debug":{"requestId":"2b6cee55-f2c6-42df-ae08-8b4e02d53063","hybridDevServices":[],"originalSpanCarrier":{"traceparent":"00-868456b5e50c0d54602b5383de363c27-921e4d757270fd2a-01"}},"multiVote":{"clapsPerPost":{}},"navigation":{"branch":{"show":null,"hasRendered":null,"blockedByCTA":false},"hideGoogleOneTap":false,"hasRenderedAlternateUserBanner":null,"currentLocation":"https:\u002F\u002Fblog.gopenai.com\u002Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b","host":"blog.gopenai.com","hostname":"blog.gopenai.com","referrer":"","hasSetReferrer":false,"susiModal":{"step":null,"operation":"register"},"postRead":false,"partnerProgram":{"selectedCountryCode":null},"queryString":"?source=user_profile_page---------8-------------4751fd7878c5---------------"},"config":{"nodeEnv":"production","version":"main-20241126-181518-0cb59a020f","target":"production","productName":"Medium","publicUrl":"https:\u002F\u002Fcdn-client.medium.com\u002Flite","authDomain":"medium.com","authGoogleClientId":"216296035834-k1k6qe060s2tp2a2jam4ljdcms00sttg.apps.googleusercontent.com","favicon":"production","glyphUrl":"https:\u002F\u002Fglyph.medium.com","branchKey":"key_live_ofxXr2qTrrU9NqURK8ZwEhknBxiI6KBm","algolia":{"appId":"MQ57UUUQZ2","apiKeySearch":"394474ced050e3911ae2249ecc774921","indexPrefix":"medium_","host":"-dsn.algolia.net"},"recaptchaKey":"6Lfc37IUAAAAAKGGtC6rLS13R1Hrw_BqADfS1LRk","recaptcha3Key":"6Lf8R9wUAAAAABMI_85Wb8melS7Zj6ziuf99Yot5","recaptchaEnterpriseKeyId":"6Le-uGgpAAAAAPprRaokM8AKthQ9KNGdoxaGUvVp","datadog":{"applicationId":"6702d87d-a7e0-42fe-bbcb-95b469547ea0","clientToken":"pub853ea8d17ad6821d9f8f11861d23dfed","rumToken":"pubf9cc52896502b9413b68ba36fc0c7162","context":{"deployment":{"target":"production","tag":"main-20241126-181518-0cb59a020f","commit":"0cb59a020f4453d0900f671f1a6576feecc55e74"}},"datacenter":"us"},"googleAnalyticsCode":"G-7JY7T788PK","googlePay":{"apiVersion":"2","apiVersionMinor":"0","merchantId":"BCR2DN6TV7EMTGBM","merchantName":"Medium","instanceMerchantId":"13685562959212738550"},"applePay":{"version":3},"signInWallCustomDomainCollectionIds":["3a8144eabfe3","336d898217ee","61061eb0c96b","138adf9c44c","819cc2aaeee0"],"mediumMastodonDomainName":"me.dm","mediumOwnedAndOperatedCollectionIds":["8a9336e5bb4","b7e45b22fec3","193b68bd4fba","8d6b8a439e32","54c98c43354d","3f6ecf56618","d944778ce714","92d2092dc598","ae2a65f35510","1285ba81cada","544c7006046e","fc8964313712","40187e704f1c","88d9857e584e","7b6769f2748b","bcc38c8f6edf","cef6983b292","cb8577c9149e","444d13b52878","713d7dbc99b0","ef8e90590e66","191186aaafa0","55760f21cdc5","9dc80918cc93","bdc4052bbdba","8ccfed20cbb2"],"tierOneDomains":["medium.com","thebolditalic.com","arcdigital.media","towardsdatascience.com","uxdesign.cc","codeburst.io","psiloveyou.xyz","writingcooperative.com","entrepreneurshandbook.co","prototypr.io","betterhumans.coach.me","theascent.pub"],"topicsToFollow":["d61cf867d93f","8a146bc21b28","1eca0103fff3","4d562ee63426","ae
f1078a3ef5","e15e46793f8d","6158eb913466","55f1c20aba7a","3d18b94f6858","4861fee224fd","63c6f1f93ee","1d98b3a9a871","decb52b64abf","ae5d4995e225","830cded25262"],"topicToTagMappings":{"accessibility":"accessibility","addiction":"addiction","android-development":"android-development","art":"art","artificial-intelligence":"artificial-intelligence","astrology":"astrology","basic-income":"basic-income","beauty":"beauty","biotech":"biotech","blockchain":"blockchain","books":"books","business":"business","cannabis":"cannabis","cities":"cities","climate-change":"climate-change","comics":"comics","coronavirus":"coronavirus","creativity":"creativity","cryptocurrency":"cryptocurrency","culture":"culture","cybersecurity":"cybersecurity","data-science":"data-science","design":"design","digital-life":"digital-life","disability":"disability","economy":"economy","education":"education","equality":"equality","family":"family","feminism":"feminism","fiction":"fiction","film":"film","fitness":"fitness","food":"food","freelancing":"freelancing","future":"future","gadgets":"gadgets","gaming":"gaming","gun-control":"gun-control","health":"health","history":"history","humor":"humor","immigration":"immigration","ios-development":"ios-development","javascript":"javascript","justice":"justice","language":"language","leadership":"leadership","lgbtqia":"lgbtqia","lifestyle":"lifestyle","machine-learning":"machine-learning","makers":"makers","marketing":"marketing","math":"math","media":"media","mental-health":"mental-health","mindfulness":"mindfulness","money":"money","music":"music","neuroscience":"neuroscience","nonfiction":"nonfiction","outdoors":"outdoors","parenting":"parenting","pets":"pets","philosophy":"philosophy","photography":"photography","podcasts":"podcast","poetry":"poetry","politics":"politics","privacy":"privacy","product-management":"product-management","productivity":"productivity","programming":"programming","psychedelics":"psychedelics","psychology":"psychology","race":"race","relationships":"relationships","religion":"religion","remote-work":"remote-work","san-francisco":"san-francisco","science":"science","self":"self","self-driving-cars":"self-driving-cars","sexuality":"sexuality","social-media":"social-media","society":"society","software-engineering":"software-engineering","space":"space","spirituality":"spirituality","sports":"sports","startups":"startup","style":"style","technology":"technology","transportation":"transportation","travel":"travel","true-crime":"true-crime","tv":"tv","ux":"ux","venture-capital":"venture-capital","visual-design":"visual-design","work":"work","world":"world","writing":"writing"},"defaultImages":{"avatar":{"imageId":"1*dmbNkD5D-u45r44go_cf0g.png","height":150,"width":150},"orgLogo":{"imageId":"7*V1_7XP4snlmqrc_0Njontw.png","height":110,"width":500},"postLogo":{"imageId":"bd978bb536350a710e8efb012513429cabdc4c28700604261aeda246d0f980b7","height":810,"width":1440},"postPreviewImage":{"imageId":"1*hn4v1tCaJy7cWMyb0bpNpQ.png","height":386,"width":579}},"collectionStructuredData":{"8d6b8a439e32":{"name":"Elemental","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fcdn-images-1.medium.com\u002Fmax\u002F980\u002F1*9ygdqoKprhwuTVKUM0DLPA@2x.png","width":980,"height":159}}},"3f6ecf56618":{"name":"Forge","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u
002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fcdn-images-1.medium.com\u002Fmax\u002F596\u002F1*uULpIlImcO5TDuBZ6lm7Lg@2x.png","width":596,"height":183}}},"ae2a65f35510":{"name":"GEN","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F264\u002F1*RdVZMdvfV3YiZTw6mX7yWA.png","width":264,"height":140}}},"88d9857e584e":{"name":"LEVEL","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F540\u002F1*JqYMhNX6KNNb2UlqGqO2WQ.png","width":540,"height":108}}},"7b6769f2748b":{"name":"Marker","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fcdn-images-1.medium.com\u002Fmax\u002F383\u002F1*haCUs0wF6TgOOvfoY-jEoQ@2x.png","width":383,"height":92}}},"444d13b52878":{"name":"OneZero","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F540\u002F1*cw32fIqCbRWzwJaoQw6BUg.png","width":540,"height":123}}},"8ccfed20cbb2":{"name":"Zora","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F540\u002F1*tZUQqRcCCZDXjjiZ4bDvgQ.png","width":540,"height":106}}}},"embeddedPostIds":{"coronavirus":"cd3010f9d81f"},"sharedCdcMessaging":{"COVID_APPLICABLE_TAG_SLUGS":[],"COVID_APPLICABLE_TOPIC_NAMES":[],"COVID_APPLICABLE_TOPIC_NAMES_FOR_TOPIC_PAGE":[],"COVID_MESSAGES":{"tierA":{"text":"For more information on the novel coronavirus and Covid-19, visit cdc.gov.","markups":[{"start":66,"end":73,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]},"tierB":{"text":"Anyone can publish on Medium per our Policies, but we don’t fact-check every story. For more info about the coronavirus, see cdc.gov.","markups":[{"start":37,"end":45,"href":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Fcategories\u002F201931128-Policies-Safety"},{"start":125,"end":132,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]},"paywall":{"text":"This article has been made free for everyone, thanks to Medium Members. For more information on the novel coronavirus and Covid-19, visit cdc.gov.","markups":[{"start":56,"end":70,"href":"https:\u002F\u002Fmedium.com\u002Fmembership"},{"start":138,"end":145,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]},"unbound":{"text":"This article is free for everyone, thanks to Medium Members. 
For more information on the novel coronavirus and Covid-19, visit cdc.gov.","markups":[{"start":45,"end":59,"href":"https:\u002F\u002Fmedium.com\u002Fmembership"},{"start":127,"end":134,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]}},"COVID_BANNER_POST_ID_OVERRIDE_WHITELIST":["3b31a67bff4a"]},"sharedVoteMessaging":{"TAGS":["politics","election-2020","government","us-politics","election","2020-presidential-race","trump","donald-trump","democrats","republicans","congress","republican-party","democratic-party","biden","joe-biden","maga"],"TOPICS":["politics","election"],"MESSAGE":{"text":"Find out more about the U.S. election results here.","markups":[{"start":46,"end":50,"href":"https:\u002F\u002Fcookpolitical.com\u002F2020-national-popular-vote-tracker"}]},"EXCLUDE_POSTS":["397ef29e3ca5"]},"embedPostRules":[],"recircOptions":{"v1":{"limit":3},"v2":{"limit":8}},"braintreeClientKey":"production_zjkj96jm_m56f8fqpf7ngnrd4","braintree":{"enabled":true,"merchantId":"m56f8fqpf7ngnrd4","merchantAccountId":{"usd":"AMediumCorporation_instant","eur":"amediumcorporation_EUR","cad":"amediumcorporation_CAD"},"publicKey":"ds2nn34bg2z7j5gd","braintreeEnvironment":"production","dashboardUrl":"https:\u002F\u002Fwww.braintreegateway.com\u002Fmerchants","gracePeriodDurationInDays":14,"mediumMembershipPlanId":{"monthly":"ce105f8c57a3","monthlyV2":"e8a5e126-792b-4ee6-8fba-d574c1b02fc5","monthlyWithTrial":"d5ee3dbe3db8","monthlyPremium":"fa741a9b47a2","yearly":"a40ad4a43185","yearlyV2":"3815d7d6-b8ca-4224-9b8c-182f9047866e","yearlyStaff":"d74fb811198a","yearlyWithTrial":"b3bc7350e5c7","yearlyPremium":"e21bd2c12166","monthlyOneYearFree":"e6c0637a-2bad-4171-ab4f-3c268633d83c","monthly25PercentOffFirstYear":"235ecc62-0cdb-49ae-9378-726cd21c504b","monthly20PercentOffFirstYear":"ba518864-9c13-4a99-91ca-411bf0cac756","monthly15PercentOffFirstYear":"594c029b-9f89-43d5-88f8-8173af4e070e","monthly10PercentOffFirstYear":"c6c7bc9a-40f2-4b51-8126-e28511d5bdb0","monthlyForStudents":"629ebe51-da7d-41fd-8293-34cd2f2030a8","yearlyOneYearFree":"78ba7be9-0d9f-4ece-aa3e-b54b826f2bf1","yearly25PercentOffFirstYear":"2dbb010d-bb8f-4eeb-ad5c-a08509f42d34","yearly20PercentOffFirstYear":"47565488-435b-47f8-bf93-40d5fbe0ebc8","yearly15PercentOffFirstYear":"8259809b-0881-47d9-acf7-6c001c7f720f","yearly10PercentOffFirstYear":"9dd694fb-96e1-472c-8d9e-3c868d5c1506","yearlyForStudents":"e29345ef-ab1c-4234-95c5-70e50fe6bc23","monthlyCad":"p52orjkaceei","yearlyCad":"h4q9g2up9ktt"},"braintreeDiscountId":{"oneMonthFree":"MONTHS_FREE_01","threeMonthsFree":"MONTHS_FREE_03","sixMonthsFree":"MONTHS_FREE_06","fiftyPercentOffOneYear":"FIFTY_PERCENT_OFF_ONE_YEAR"},"3DSecureVersion":"2","defaultCurrency":"usd","providerPlanIdCurrency":{"4ycw":"usd","rz3b":"usd","3kqm":"usd","jzw6":"usd","c2q2":"usd","nnsw":"usd","q8qw":"usd","d9y6":"usd","fx7w":"cad","nwf2":"cad"}},"paypalClientId":"AXj1G4fotC2GE8KzWX9mSxCH1wmPE3nJglf4Z2ig_amnhvlMVX87otaq58niAg9iuLktVNF_1WCMnN7v","paypal":{"host":"https:\u002F\u002Fapi.paypal.com:443","clientMode":"production","serverMode":"live","webhookId":"4G466076A0294510S","monthlyPlan":{"planId":"P-9WR0658853113943TMU5FDQA","name":"Medium Membership (Monthly) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"yearlyPlan":{"planId":"P-7N8963881P8875835MU5JOPQ","name":"Medium Membership (Annual) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. 
Membership billed annually."},"oneYearGift":{"name":"Medium Membership (1 Year, Digital Gift Code)","description":"Unlimited access to the best and brightest stories on Medium. Gift codes can be redeemed at medium.com\u002Fredeem.","price":"50.00","currency":"USD","sku":"membership-gift-1-yr"},"oldMonthlyPlan":{"planId":"P-96U02458LM656772MJZUVH2Y","name":"Medium Membership (Monthly)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"oldYearlyPlan":{"planId":"P-59P80963JF186412JJZU3SMI","name":"Medium Membership (Annual)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"monthlyPlanWithTrial":{"planId":"P-66C21969LR178604GJPVKUKY","name":"Medium Membership (Monthly) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"yearlyPlanWithTrial":{"planId":"P-6XW32684EX226940VKCT2MFA","name":"Medium Membership (Annual) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"oldMonthlyPlanNoSetupFee":{"planId":"P-4N046520HR188054PCJC7LJI","name":"Medium Membership (Monthly)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"oldYearlyPlanNoSetupFee":{"planId":"P-7A4913502Y5181304CJEJMXQ","name":"Medium Membership (Annual)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"sdkUrl":"https:\u002F\u002Fwww.paypal.com\u002Fsdk\u002Fjs"},"stripePublishableKey":"pk_live_7FReX44VnNIInZwrIIx6ghjl","log":{"json":true,"level":"info"},"imageUploadMaxSizeMb":25,"staffPicks":{"title":"Staff Picks","catalogId":"c7bc6e1ee00f"}},"session":{"xsrf":""}}</script><script>window.__APOLLO_STATE__ = 
{"ROOT_QUERY":{"__typename":"Query","viewer":null,"variantFlags":[{"__typename":"VariantFlag","name":"enable_android_miro_v2","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_boost_nia_v01","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_lo_homepage","valueType":{"__typename":"VariantFlagString","value":"control"}},{"__typename":"VariantFlag","name":"redefined_top_posts","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_enable_lock_responses","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_conversion_model_v2","valueType":{"__typename":"VariantFlagString","value":"group_2"}},{"__typename":"VariantFlag","name":"enable_pre_pp_v4","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"can_receive_tips_v0","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_ios_dynamic_paywall_aspiriational","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_mastodon_for_members_username_selection","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_pp_country_expansion","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_aurora_pub_follower_page","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_moc_load_processor_c","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"glyph_font_set","valueType":{"__typename":"VariantFlagString","value":"m2-unbound-source-serif-pro"}},{"__typename":"VariantFlag","name":"android_two_hour_refresh","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_lite_response_markup","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_eventstats_event_processing","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_mastodon_for_members","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_enable_verified_book_author","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_lite_archive_page","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_maim_the_meter","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_ranker_v10","valueType":{"__typename":"VariantFlagString","value":"control"}},{"__typename":"VariantFlag","name":"enable_starspace","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_sharer_create_post_share_key","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_ios_dynamic_paywall_programming","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_rex_reading_history","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_abandoned_cart_promotion_email","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_displa
y_paywall_after_onboarding","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_enable_friend_links_creation","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_friend_links_creation","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_braintree_webhook","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_new_manage_membership_flow","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_sprig","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_braintree_integration","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_new_stripe_customers","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_recommended_publishers_query","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_enable_home_post_menu","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"signin_services","valueType":{"__typename":"VariantFlagString","value":"twitter,facebook,google,email,google-fastidv,google-one-tap,apple"}},{"__typename":"VariantFlag","name":"coronavirus_topic_recirc","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_android_offline_reading","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_update_topic_portals_wtf","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_billing_frequency_on_step2","valueType":{"__typename":"VariantFlagString","value":"group_1"}},{"__typename":"VariantFlag","name":"enable_conversion_ranker_v2","valueType":{"__typename":"VariantFlagString","value":"control"}},{"__typename":"VariantFlag","name":"enable_susi_redesign_ios","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_publication_hierarchy_web","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_recaptcha_enterprise","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_iceland_forced_android","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"available_monthly_premium_plan","valueType":{"__typename":"VariantFlagString","value":"12a660186432"}},{"__typename":"VariantFlag","name":"enable_braintree_paypal","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_iceland_nux","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_post_bottom_responses","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_pp_v4","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_verifications_service","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_cache_less_following_feed","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_tick_landing_page","valueType":{"__typename":"Varia
ntFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_tipping_v0_android","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_friend_links_postpage_banners","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_rating_prompt_stories_read_threshold","valueType":{"__typename":"VariantFlagNumber","value":2}},{"__typename":"VariantFlag","name":"enable_android_dynamic_programming_paywall","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"limit_post_referrers","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_intrinsic_automatic_actions","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_moc_load_processor_first_story","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_simplified_digest_v2_b","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_tipping_v0_ios","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_medium2_kbfd","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_import","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"available_annual_premium_plan","valueType":{"__typename":"VariantFlagString","value":"4a442ace1476"}},{"__typename":"VariantFlag","name":"enable_explicit_signals","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_footer_app_buttons","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_mastodon_avatar_upload","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_premium_tier_badge","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_updated_pub_recs_ui","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"allow_test_auth","valueType":{"__typename":"VariantFlagString","value":"disallow"}},{"__typename":"VariantFlag","name":"reengagement_notification_duration","valueType":{"__typename":"VariantFlagNumber","value":3}},{"__typename":"VariantFlag","name":"enable_seamless_social_sharing","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_sharer_validate_post_share_key","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_explicit_signals_updated_post_previews","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_braintree_client","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_syntax_highlight","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_apple_sign_in","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_automod","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_google_webhook","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_update_explore_wtf","
valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_susi_redesign_android","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_app_flirty_thirty","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"can_send_tips_v0","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_bg_post_post","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_pill_based_home_feed","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_remove_twitter_onboarding_step","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_lite_homepage","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_enable_friend_links_postpage_banners","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_members_only_audio","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"mobile_custom_app_icon","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"price_smoke_test_yearly","valueType":{"__typename":"VariantFlagString","value":""}},{"__typename":"VariantFlag","name":"enable_tag_recs","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_braintree_apple_pay","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_auto_follow_on_subscribe","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_braintree_google_pay","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"skip_fs_cache_user_vals","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_branch_io","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_rito_upstream_deadlines","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"reader_fair_distribution_non_qp","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_speechify_ios","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_speechify_widget","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"allow_signup","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_configure_pronouns","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"browsable_stream_config_bucket","valueType":{"__typename":"VariantFlagString","value":"curated-topics"}},{"__typename":"VariantFlag","name":"disable_partner_program_enrollment","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_marketing_emails","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_image_sharer","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_ios_autorefresh","valueType":{"__typename":"VariantFlagBoolean","value":
true}},{"__typename":"VariantFlag","name":"enable_google_one_tap","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_see_pronouns","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_switch_plan_premium_tier","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"allow_access","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_tribute_landing_page","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_creator_welcome_email","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_ios_offline_reading","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_editor_new_publishing_flow","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_android_dynamic_aspirational_paywall","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_author_cards","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_author_cards_byline","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_entities_to_follow_v2","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_post_bottom_responses_input","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_recirc_model","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"available_monthly_plan","valueType":{"__typename":"VariantFlagString","value":"60e220181034"}},{"__typename":"VariantFlag","name":"enable_group_gifting","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_lists_v2","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_android_verified_author","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_diversification_rex","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_braintree_trial_membership","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_deprecate_legacy_providers_v3","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"onboarding_tags_from_top_views","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"available_annual_plan","valueType":{"__typename":"VariantFlagString","value":"2c754bcc2995"}},{"__typename":"VariantFlag","name":"enable_apple_webhook","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_in_app_free_trial","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"ios_social_share_sheet","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"android_enable_topic_portals","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_ios_easy_resubscribe","valueType":{"__typename":"VariantFlagBoolean","value":true
}},{"__typename":"VariantFlag","name":"enable_lite_server_upstream_deadlines","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"price_smoke_test_monthly","valueType":{"__typename":"VariantFlagString","value":""}},{"__typename":"VariantFlag","name":"enable_newsletter_lo_flow_custom_domains","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_premium_tier","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_rex_new_push_notification_endpoint","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"rex_generator_max_candidates","valueType":{"__typename":"VariantFlagNumber","value":1000}},{"__typename":"VariantFlag","name":"enable_legacy_feed_in_iceland","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"textshots_userid","valueType":{"__typename":"VariantFlagString","value":""}},{"__typename":"VariantFlag","name":"enable_moc_load_processor_all_recs_surfaces","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"limit_user_follows","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_lite_continue_this_thread","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_rex_aggregator_v2","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"num_post_bottom_responses_to_show","valueType":{"__typename":"VariantFlagString","value":"3"}},{"__typename":"VariantFlag","name":"enable_ml_rank_rex_anno","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"enable_bayesian_average_pub_search","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"goliath_externalsearch_enable_comment_deindexation","valueType":{"__typename":"VariantFlagBoolean","value":true}},{"__typename":"VariantFlag","name":"signup_services","valueType":{"__typename":"VariantFlagString","value":"twitter,facebook,google,email,google-fastidv,google-one-tap,apple"}}],"collectionByDomainOrSlug({\"domainOrSlug\":\"blog.gopenai.com\"})":{"__ref":"Collection:7adf3c3694ff"},"postResult({\"id\":\"45a57cc5c38b\"})":{"__ref":"Post:45a57cc5c38b"}},"ImageMetadata:1*K5XzilB_IShmRLSqZxFW2w.png":{"__typename":"ImageMetadata","id":"1*K5XzilB_IShmRLSqZxFW2w.png"},"Collection:7adf3c3694ff":{"__typename":"Collection","id":"7adf3c3694ff","favicon":{"__ref":"ImageMetadata:1*K5XzilB_IShmRLSqZxFW2w.png"},"customStyleSheet":null,"colorPalette":{"__typename":"ColorPalette","highlightSpectrum":{"__typename":"ColorSpectrum","backgroundColor":"#FFFFFFFF","colorPoints":[{"__typename":"ColorPoint","color":"#FFECF4FC","point":0},{"__typename":"ColorPoint","color":"#FFE8F3FC","point":0.1},{"__typename":"ColorPoint","color":"#FFE4F1FD","point":0.2},{"__typename":"ColorPoint","color":"#FFE1F0FD","point":0.3},{"__typename":"ColorPoint","color":"#FFDDEEFD","point":0.4},{"__typename":"ColorPoint","color":"#FFD9EDFD","point":0.5},{"__typename":"ColorPoint","color":"#FFD5EBFE","point":0.6},{"__typename":"ColorPoint","color":"#FFD1EAFE","point":0.7},{"__typename":"ColorPoint","color":"#FFCDE8FE","point":0.8},{"__typename":"ColorPoint","color":"#FFC9E7FF","point":0.9},{"__typename":"ColorPoint","color":"#FFC5E5FF","point":1}]},"defaultBackgroundSpectrum":{"__typename":"Col
orSpectrum","backgroundColor":"#FFFFFFFF","colorPoints":[{"__typename":"ColorPoint","color":"#FF628BAA","point":0},{"__typename":"ColorPoint","color":"#FF5C819D","point":0.1},{"__typename":"ColorPoint","color":"#FF57778F","point":0.2},{"__typename":"ColorPoint","color":"#FF516D82","point":0.3},{"__typename":"ColorPoint","color":"#FF4A6275","point":0.4},{"__typename":"ColorPoint","color":"#FF435868","point":0.5},{"__typename":"ColorPoint","color":"#FF3B4D5A","point":0.6},{"__typename":"ColorPoint","color":"#FF33414C","point":0.7},{"__typename":"ColorPoint","color":"#FF2A353E","point":0.8},{"__typename":"ColorPoint","color":"#FF20292F","point":0.9},{"__typename":"ColorPoint","color":"#FF151B1F","point":1}]},"tintBackgroundSpectrum":{"__typename":"ColorSpectrum","backgroundColor":"#FF00334E","colorPoints":[{"__typename":"ColorPoint","color":"#FF00334E","point":0},{"__typename":"ColorPoint","color":"#FF294E67","point":0.1},{"__typename":"ColorPoint","color":"#FF45667D","point":0.2},{"__typename":"ColorPoint","color":"#FF5F7C92","point":0.3},{"__typename":"ColorPoint","color":"#FF7791A5","point":0.4},{"__typename":"ColorPoint","color":"#FF8FA5B7","point":0.5},{"__typename":"ColorPoint","color":"#FFA5B8C8","point":0.6},{"__typename":"ColorPoint","color":"#FFBBCBD8","point":0.7},{"__typename":"ColorPoint","color":"#FFD0DDE8","point":0.8},{"__typename":"ColorPoint","color":"#FFE5EFF8","point":0.9},{"__typename":"ColorPoint","color":"#FFFAFFFF","point":1}]}},"domain":"blog.gopenai.com","slug":"gopenai","googleAnalyticsId":null,"editors":[{"__typename":"CollectionMastheadUserItem","user":{"__ref":"User:1df0d4113fc9"}},{"__typename":"CollectionMastheadUserItem","user":{"__ref":"User:94355764f79"}},{"__typename":"CollectionMastheadUserItem","user":{"__ref":"User:f1491b7bd19"}},{"__typename":"CollectionMastheadUserItem","user":{"__ref":"User:45487092016b"}}],"name":"GoPenAI","avatar":{"__ref":"ImageMetadata:1*LUSEiP1BHPkkmH75e8eg_A.png"},"description":"Where the ChatGPT community comes together to share insights and 
stories.","subscriberCount":1550,"latestPostsConnection({\"paging\":{\"limit\":1}})":{"__typename":"PostConnection","posts":[{"__ref":"Post:06588f096c1e"}]},"viewerEdge":{"__ref":"CollectionViewerEdge:collectionId:7adf3c3694ff-viewerId:lo_7e4af1ccdac9"},"twitterUsername":null,"facebookPageId":null,"logo":{"__ref":"ImageMetadata:1*U9yO19cFJjaX8_S95AATgA.png"}},"User:1df0d4113fc9":{"__typename":"User","id":"1df0d4113fc9"},"User:94355764f79":{"__typename":"User","id":"94355764f79"},"User:f1491b7bd19":{"__typename":"User","id":"f1491b7bd19"},"User:45487092016b":{"__typename":"User","id":"45487092016b"},"ImageMetadata:1*LUSEiP1BHPkkmH75e8eg_A.png":{"__typename":"ImageMetadata","id":"1*LUSEiP1BHPkkmH75e8eg_A.png"},"User:51c5a37f79e9":{"__typename":"User","id":"51c5a37f79e9","customDomainState":{"__typename":"CustomDomainState","live":null},"hasSubdomain":false,"username":"alexzap922"},"Post:06588f096c1e":{"__typename":"Post","id":"06588f096c1e","firstPublishedAt":1732617624768,"creator":{"__ref":"User:51c5a37f79e9"},"collection":{"__ref":"Collection:7adf3c3694ff"},"isSeries":false,"mediumUrl":"https:\u002F\u002Fblog.gopenai.com\u002Frobust-amzn-price-prediction-using-log-domain-ransac-linear-regression-06588f096c1e","sequence":null,"uniqueSlug":"robust-amzn-price-prediction-using-log-domain-ransac-linear-regression-06588f096c1e"},"LinkedAccounts:4751fd7878c5":{"__typename":"LinkedAccounts","mastodon":null,"id":"4751fd7878c5"},"UserViewerEdge:userId:4751fd7878c5-viewerId:lo_7e4af1ccdac9":{"__typename":"UserViewerEdge","id":"userId:4751fd7878c5-viewerId:lo_7e4af1ccdac9","isFollowing":false,"isUser":false,"isMuting":false},"NewsletterV3:3c36e1d68dc9":{"__typename":"NewsletterV3","id":"3c36e1d68dc9","type":"NEWSLETTER_TYPE_AUTHOR","slug":"4751fd7878c5","name":"4751fd7878c5","collection":null,"user":{"__ref":"User:4751fd7878c5"}},"User:4751fd7878c5":{"__typename":"User","id":"4751fd7878c5","name":"kirouane Ayoub","username":"ayoubkirouane3","newsletterV3":{"__ref":"NewsletterV3:3c36e1d68dc9"},"linkedAccounts":{"__ref":"LinkedAccounts:4751fd7878c5"},"isSuspended":false,"imageId":"1*T-KWhmfASlLM3XMvRKZnWA.jpeg","mediumMemberAt":0,"verifications":{"__typename":"VerifiedInfo","isBookAuthor":false},"socialStats":{"__typename":"SocialStats","followerCount":310,"followingCount":0,"collectionFollowingCount":2},"customDomainState":null,"hasSubdomain":false,"bio":"I Like building Machine Learning models from scratch .","isPartnerProgramEnrolled":false,"viewerEdge":{"__ref":"UserViewerEdge:userId:4751fd7878c5-viewerId:lo_7e4af1ccdac9"},"viewerIsUser":false,"postSubscribeMembershipUpsellShownAt":0,"membership":null,"allowNotes":true,"twitterScreenName":""},"Paragraph:403de6fde677_0":{"__typename":"Paragraph","id":"403de6fde677_0","name":"8f29","type":"H3","href":null,"layout":null,"metadata":null,"text":"Microsoft GraphRAG and Ollama: Code Your Way to Smarter Question 
Answering","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"ImageMetadata:1*ocprwBCYr-Vt9AHAhMaZuA.png":{"__typename":"ImageMetadata","id":"1*ocprwBCYr-Vt9AHAhMaZuA.png","originalHeight":721,"originalWidth":1400,"focusPercentX":null,"focusPercentY":null,"alt":null},"Paragraph:403de6fde677_1":{"__typename":"Paragraph","id":"403de6fde677_1","name":"04b6","type":"IMG","href":null,"layout":"INSET_CENTER","metadata":{"__ref":"ImageMetadata:1*ocprwBCYr-Vt9AHAhMaZuA.png"},"text":"","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:403de6fde677_2":{"__typename":"Paragraph","id":"403de6fde677_2","name":"fcd5","type":"P","href":null,"layout":null,"metadata":null,"text":"Traditional RAG methods, which primarily rely on semantic similarity search, often fall short when faced with complex questions that require connecting disparate pieces of information or understanding the broader context of a large dataset. Enter GraphRAG, a novel approach that leverages the power of knowledge graphs to overcome these limitations and enhance the capabilities of RAG systems.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:403de6fde677_3":{"__typename":"Paragraph","id":"403de6fde677_3","name":"ef20","type":"H3","href":null,"layout":null,"metadata":null,"text":"Understanding the Problem with Baseline RAG","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:403de6fde677_4":{"__typename":"Paragraph","id":"403de6fde677_4","name":"ebe2","type":"P","href":null,"layout":null,"metadata":null,"text":"Baseline RAG systems, while useful for simple question answering, struggle when tasked with synthesizing information from various sources or understanding the overarching themes within a dataset. For example, if you ask a baseline RAG system “What are the main causes of climate change according to this research dataset?”, it might struggle to provide a comprehensive answer because it lacks the ability to connect the different pieces of information related to climate change scattered throughout the dataset. This highlights the need for a more structured and intelligent approach to RAG.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:403de6fde677_5":{"__typename":"Paragraph","id":"403de6fde677_5","name":"f4c6","type":"H3","href":null,"layout":null,"metadata":null,"text":"A Knowledge Graph-Powered Solution","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:403de6fde677_6":{"__typename":"Paragraph","id":"403de6fde677_6","name":"ecbf","type":"P","href":null,"layout":null,"metadata":null,"text":"GraphRAG addresses this need by utilizing LLMs to extract a knowledge graph from the raw text data. This knowledge graph represents the information as a network of interconnected entities and relationships, providing a richer representation of the data compared to simple text snippets. 
Deep Dive into the GraphRAG Process

The GraphRAG process involves two main stages: indexing and querying.

Indexing:

During indexing, the input text is divided into manageable chunks called TextUnits. LLMs then extract entities, relationships, and claims from these TextUnits, forming the knowledge graph.
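A naive version of that chunking step might look like the following. The word-based splitting, chunk size, and overlap here are arbitrary choices for the sketch; GraphRAG's own chunking is token-based and configurable.

# Naive TextUnit chunking: split raw text into overlapping word windows.
# Real pipelines usually chunk by tokens; the sizes here are arbitrary.
def chunk_into_text_units(text: str, chunk_size: int = 300, overlap: int = 50) -> list[str]:
    words = text.split()
    step = chunk_size - overlap
    chunks = []
    for start in range(0, max(len(words), 1), step):
        chunk = " ".join(words[start:start + chunk_size])
        if chunk:
            chunks.append(chunk)
    return chunks


sample = "In the city of Novus, a renowned architect named Alice Johnson was busy working on her latest project. " * 50
print(len(chunk_into_text_units(sample)), "TextUnits")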
Furthermore, a process called community detection identifies clusters of related entities, and summaries are generated for each community, providing high-level overviews of different topics within the dataset.
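Here is a toy illustration of that clustering step using networkx's modularity-based community detection. GraphRAG itself relies on a hierarchical Leiden algorithm, so treat this purely as a stand-in for the idea; the extra entities are invented for the example.

# Toy community detection: group related entities into clusters.
# In GraphRAG, each cluster would then get its own LLM-written summary.
import networkx as nx
from networkx.algorithms.community import greedy_modularity_communities

g = nx.Graph()
g.add_edges_from([
    ("Alice Johnson", "Smart City Project"),
    ("Smart City Project", "Novus"),
    ("Alice Johnson", "Novus"),
    ("Solar Panels", "Green Energy"),
    ("Green Energy", "Wind Turbines"),
])

communities = greedy_modularity_communities(g)
for i, community in enumerate(communities):
    print(f"Community {i}: {sorted(community)}")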
Querying:

When a user submits a query, GraphRAG leverages the knowledge graph to retrieve relevant information. It offers two main search methods: Local Search and Global Search.

Local Search focuses on answering questions about specific entities, exploring their relationships, associated claims, and relevant text snippets.

Global Search, on the other hand, tackles broader questions that require understanding the entire dataset. It analyzes the community summaries to identify overarching themes and synthesize information from across the dataset.
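Conceptually, the two modes differ in what context they gather before prompting the LLM. The sketch below is illustrative pseudocode made runnable: the function names are invented and this is not the graphrag library's API.

# Conceptual sketch of the two query modes over the indexed data.
# Invented helper names; not the graphrag library API.
import networkx as nx


def local_search(graph: nx.Graph, entity: str) -> list[str]:
    """Collect context about one entity: its neighbors and edge descriptions."""
    context = []
    for neighbor in graph.neighbors(entity):
        description = graph.edges[entity, neighbor].get("description", "related to")
        context.append(f"{entity} {description} {neighbor}")
    return context  # this context would be packed into an LLM prompt


def global_search(community_summaries: list[str], question: str) -> str:
    """Map over community summaries, then reduce into one answer (stubbed)."""
    partial_answers = [
        f"[points from summary {i} relevant to: {question}]"
        for i, _ in enumerate(community_summaries)
    ]
    return " ".join(partial_answers)  # an LLM would write the final synthesis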
Moreover, it improves overall RAG performance, particularly in complex question-answering scenarios.

### Applications and Use Cases

The applications of GraphRAG are vast and span various domains. In research, it can help answer complex questions by synthesizing information from large datasets of scientific papers. In enterprise settings, it can power conversational AI systems that can reason about specific domains, such as customer support or internal knowledge bases. Furthermore, GraphRAG can be used to create knowledge exploration tools that facilitate deeper understanding of large datasets, enabling users to interactively explore the relationships between different concepts and discover new insights.

### Code Example

We begin by installing Ollama, a tool for running large language models locally, and starting its server.

```bash
curl -fsSL https://ollama.com/install.sh | sh
```

Start the server:

```bash
ollama serve
```
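Ollama listens on port 11434 by default. As an optional sanity check (an addition to this walkthrough, not required), you can confirm the server is reachable before continuing; `/api/tags` is Ollama's endpoint for listing locally available models:

```bash
# confirm the Ollama server is up; returns the locally available models as JSON
curl http://127.0.0.1:11434/api/tags
```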
We then download the `llama3.1` model for text generation and `bge-large` for embeddings.

```bash
ollama pull llama3.1
ollama pull bge-large
```

You can also run Ollama using Docker:

```bash
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
docker exec -it ollama ollama pull llama3.1
docker exec -it ollama ollama pull bge-large
```

Next, we install the GraphRAG library itself.

```bash
pip install graphrag
```

We create a directory for our project and a subdirectory for input data.

```bash
mkdir -p ./rag_graph/input
```

A sample story is saved as a text file within the input folder.
```python
text = """
In the city of Novus, a renowned architect named Alice Johnson was busy working on her latest project. Alice had been designing buildings for over 15 years and was well-known for her collaboration with her mentor, Robert Lee, who was also a famous architect. Robert had taught Alice everything she knew, and they remained close friends.

Alice was married to David Johnson, a software engineer who worked at TechCorp. David was passionate about his work and often collaborated with his colleague, Emily Smith, a data scientist at TechCorp. Emily was also Alice’s best friend from college, where they studied together. She frequently visited Alice and David’s home, and they often discussed their work over dinner.

Alice and David had a daughter, Sophie Johnson, who was 8 years old and loved spending time with her grandparents, John and Mary Johnson. John was David’s father, a retired professor, and Mary was a retired nurse. They lived in a neighboring town called Greenville and visited their family in Novus every weekend.

One day, Alice received an invitation from the Novus City Council to present her latest building design. She was excited to showcase her work and immediately contacted Robert Lee to review her plans. Robert was delighted to help, as he had always admired Alice’s talent. Meanwhile, David was busy at TechCorp, where he and Emily were working on a new AI project under the supervision of their manager, Michael Brown.

As the day of the presentation approached, Alice prepared her designs with Robert’s guidance. David and Sophie also attended the event to support Alice. The Novus City Council was impressed with her work and decided to approve the project, marking another success for Alice. After the event, the family celebrated with a dinner at their favorite restaurant, The Green Olive, where they were joined by Emily and Robert.
"""

with open("./rag_graph/input/story.txt", "w") as f:
    f.write(text)
```

GraphRAG is then initialized within the project directory, creating necessary configuration files.

```bash
python -m graphrag.index --init --root ./rag_graph
```

The `.env` file is edited to include any required API keys: `GRAPHRAG_API_KEY=EMPTY`.
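For example, something like the following works (a minimal sketch; the `.env` file lives at the project root created by the init step, and the value is just a placeholder since the local Ollama endpoint does not validate API keys):

```bash
# write the placeholder key into the project's .env file
echo "GRAPHRAG_API_KEY=EMPTY" > ./rag_graph/.env
```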
The `settings.yaml` file is modified to specify the models downloaded earlier (`llama3.1` and `bge-large`), their local server endpoint, and other parameters like maximum tokens and concurrency. These settings configure GraphRAG to use Ollama for both text generation and embeddings.

**LLM:**

- model: llama3.1
- max_tokens: 2000
- api_base: http://127.0.0.1:11434/v1 (the Ollama server endpoint)
- max_retries: 1
- concurrent_requests: 1
- comment out the line `model_supports_json: true`
**Embeddings:**

- model: bge-large:latest
- api_base: http://127.0.0.1:11434/v1
- max_retries: 1
- concurrent_requests: 1
- batch_size: 1
- batch_max_tokens: 8191

#### `settings.yaml`:

```yaml
encoding_model: cl100k_base
skip_workflows: []
llm:
  api_key: ${GRAPHRAG_API_KEY}
  type: openai_chat # or azure_openai_chat
  model: llama3.1
  # model_supports_json: true # recommended if this is available for your model.
  max_tokens: 2000
  # request_timeout: 180.0
  api_base: http://127.0.0.1:11434/v1
  # api_version: 2024-02-15-preview
  # organization: <organization_id>
  # deployment_name: <azure_model_deployment_name>
  # tokens_per_minute: 150_000 # set a leaky bucket throttle
  # requests_per_minute: 10_000 # set a leaky bucket throttle
  max_retries: 1
  # max_retry_wait: 10.0
  # sleep_on_rate_limit_recommendation: true # whether to sleep when azure suggests wait-times
  concurrent_requests: 1 # the number of parallel inflight requests that may be made

parallelization:
  stagger: 0.3
  # num_threads: 50 # the number of threads to use for parallel processing

async_mode: threaded # or asyncio

embeddings:
  ## parallelization: override the global parallelization settings for embeddings
  async_mode: threaded # or asyncio
  llm:
    api_key: ${GRAPHRAG_API_KEY}
    type: openai_embedding # or azure_openai_embedding
    model: bge-large:latest
    api_base: http://127.0.0.1:11434/v1
    # api_version: 2024-02-15-preview
    # organization: <organization_id>
    # deployment_name: <azure_model_deployment_name>
    # tokens_per_minute: 150_000 # set a leaky bucket throttle
    # requests_per_minute: 10_000 # set a leaky bucket throttle
    max_retries: 1
    # max_retry_wait: 10.0
    # sleep_on_rate_limit_recommendation: true # whether to sleep when azure suggests wait-times
    concurrent_requests: 1 # the number of parallel inflight requests that may be made
    batch_size: 1 # the number of documents to send in a single request
    batch_max_tokens: 8191 # the maximum number of tokens to send in a single request
    # target: required # or optional

chunks:
  size: 300
  overlap: 100
  group_by_columns: [id] # by default, we don't allow chunks to cross documents

input:
  type: file # or blob
  file_type: text # or csv
  base_dir: "input"
  file_encoding: utf-8
  file_pattern: ".*\\.txt$"

cache:
  type: file # or blob
  base_dir: "cache"
  # connection_string: <azure_blob_storage_connection_string>
  # container_name: <azure_blob_storage_container_name>

storage:
  type: file # or blob
  base_dir: "output/${timestamp}/artifacts"
  # connection_string: <azure_blob_storage_connection_string>
  # container_name: <azure_blob_storage_container_name>

reporting:
  type: file # or console, blob
  base_dir: "output/${timestamp}/reports"
  # connection_string: <azure_blob_storage_connection_string>
  # container_name: <azure_blob_storage_container_name>

entity_extraction:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  prompt: "prompts/entity_extraction.txt"
  entity_types: [organization,person,geo,event]
  max_gleanings: 0

summarize_descriptions:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  prompt: "prompts/summarize_descriptions.txt"
  max_length: 500

claim_extraction:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  # enabled: true
  prompt: "prompts/claim_extraction.txt"
  description: "Any claims or facts that could be relevant to information discovery."
  max_gleanings: 0

community_report:
  ## llm: override the global llm settings for this task
  ## parallelization: override the global parallelization settings for this task
  ## async_mode: override the global async_mode settings for this task
  prompt: "prompts/community_report.txt"
  max_length: 2000
  max_input_length: 7000

cluster_graph:
  max_cluster_size: 10

embed_graph:
  enabled: false # if true, will generate node2vec embeddings for nodes
  # num_walks: 10
  # walk_length: 40
  # window_size: 2
  # iterations: 3
  # random_seed: 597832

umap:
  enabled: false # if true, will generate UMAP embeddings for nodes

snapshots:
  graphml: false
  raw_entities: false
  top_level_nodes: false

local_search:
  # text_unit_prop: 0.5
  # community_prop: 0.1
  # conversation_history_max_turns: 5
  # top_k_mapped_entities: 10
  # top_k_relationships: 10
  # max_tokens: 12000

global_search:
  # max_tokens: 12000
  # data_max_tokens: 12000
  # map_max_tokens: 1000
  # reduce_max_tokens: 2000
  # concurrency: 32
```

**Running the indexing pipeline:**

The GraphRAG indexing pipeline is executed, processing the story text. This involves creating TextUnits, extracting entities and relationships, building a community hierarchy, and generating summaries. The resulting knowledge graph and related data are stored in the specified output directory.

```bash
python -m graphrag.index --root ./rag_graph
```
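Once indexing completes, the artifacts land under `./rag_graph/output/<timestamp>/artifacts` as Parquet files, the same files the query steps load below. If you want a quick look at what was extracted before querying, a sketch like this works (the run folder name and the habit of picking the latest run are assumptions; the artifact file names are the ones used later in this article):

```python
import glob
import pandas as pd

# pick the most recent indexing run (output folders are named by timestamp)
artifacts_dir = sorted(glob.glob("./rag_graph/output/*/artifacts"))[-1]

# peek at the extracted entities and relationships produced by the pipeline
entities = pd.read_parquet(f"{artifacts_dir}/create_final_entities.parquet")
relationships = pd.read_parquet(f"{artifacts_dir}/create_final_relationships.parquet")
print(entities.head())
print(relationships.head())
```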
Finally, we can query the indexed data using either Global or Local Search. Global Search is used to ask questions about the overall story (“What are the top themes…?”), while Local Search is suitable for questions about specific entities (“Who is Alice Johnson…?”). The chosen search method retrieves relevant information from the knowledge graph and generates a response based on the query.

**Global Search using the CLI:**

```bash
python -m graphrag.query --root ./rag_graph --method global "What are the top themes in this story?"
```

**Local Search using the CLI:**

```bash
python -m graphrag.query --root ./rag_graph --method local "Who is Alice Johnson, and what are her main relationships?"
```

### Global Search Example

The Global Search method generates answers by searching over all AI-generated community reports in a map-reduce fashion. This is a resource-intensive method, but it often gives good responses for questions that require an understanding of the dataset as a whole (e.g., “What are the most significant values of the herbs mentioned in this dataset?”).
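The map-reduce flow itself is easy to picture. The sketch below is purely illustrative and is not the graphrag API: `llm_summarize` is a hypothetical helper standing in for the LLM calls the library makes. Each community report is mapped to query-relevant key points, and the reduce step combines those points into a final answer.

```python
# Illustrative shape of Global Search's map-reduce, not the graphrag implementation.

def llm_summarize(prompt: str) -> str:
    # hypothetical stand-in for an LLM call
    return f"<LLM answer for: {prompt[:40]}...>"

def global_search(query: str, community_reports: list[str]) -> str:
    # Map: extract key points relevant to the query from each community report
    key_points = [
        llm_summarize(f"Extract points relevant to '{query}' from:\n{report}")
        for report in community_reports
    ]
    # Reduce: aggregate the per-report points into one final answer
    combined = "\n".join(key_points)
    return llm_summarize(f"Answer '{query}' using these points:\n{combined}")

print(global_search("What are the top themes?", ["report A...", "report B..."]))
```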
**1. Importing Dependencies and Setting up the LLM:**

We begin by importing necessary libraries, including `pandas` for data manipulation, `tiktoken` for tokenization, and components from the `graphrag` library for query execution.

```python
import os

import pandas as pd
import tiktoken

from graphrag.query.indexer_adapters import read_indexer_entities, read_indexer_reports
from graphrag.query.llm.oai.chat_openai import ChatOpenAI
from graphrag.query.llm.oai.typing import OpenaiApiType
from graphrag.query.structured_search.global_search.community_context import (
    GlobalCommunityContext,
)
from graphrag.query.structured_search.global_search.search import GlobalSearch
```

We then initialize an instance of `ChatOpenAI`, configuring it to use the `llama3.1` model hosted locally via **Ollama**.

```python
api_key = "EMPTY"
llm_model = "llama3.1"

llm = ChatOpenAI(
    api_base="http://127.0.0.1:11434/v1",
    api_key=api_key,
    model=llm_model,
    api_type=OpenaiApiType.OpenAI,
    max_retries=20,
)

token_encoder = tiktoken.get_encoding("cl100k_base")
```

A basic test ensures the LLM endpoint is functioning correctly.
```python
messages = [
    {
        "role": "user",
        "content": "Hi"
    }
]
response = llm.generate(messages=messages)
print(response)
```

**2. Loading Data and Building the Context:**

Next, we load the community reports and entity data generated during the indexing phase. These reports, organized hierarchically, represent different aspects of the dataset. We specify `COMMUNITY_LEVEL` to determine the granularity of the reports used.

```python
# parquet files generated from indexing pipeline
INPUT_DIR = "./output/run-id"  # replace the run-id with the created one
COMMUNITY_REPORT_TABLE = "artifacts/create_final_community_reports"
ENTITY_TABLE = "artifacts/create_final_nodes"
ENTITY_EMBEDDING_TABLE = "artifacts/create_final_entities"

# community level in the Leiden community hierarchy from which we will load the community reports
# higher value means we use reports from more fine-grained communities (at the cost of higher computation cost)
COMMUNITY_LEVEL = 2

entity_df = pd.read_parquet(f"{INPUT_DIR}/{ENTITY_TABLE}.parquet")
report_df = pd.read_parquet(f"{INPUT_DIR}/{COMMUNITY_REPORT_TABLE}.parquet")
entity_embedding_df = pd.read_parquet(f"{INPUT_DIR}/{ENTITY_EMBEDDING_TABLE}.parquet")

reports = read_indexer_reports(report_df, entity_df, COMMUNITY_LEVEL)
entities = read_indexer_entities(entity_df, entity_embedding_df, COMMUNITY_LEVEL)

# Build global context based on community reports
context_builder = GlobalCommunityContext(
    community_reports=reports,
    entities=entities,  # default to None if you don't want to use community weights for ranking
    token_encoder=token_encoder,
)
```
The `GlobalCommunityContext` is then initialized; it is responsible for selecting and formatting relevant community reports as context for the LLM.

**3. Configuring Global Search:**

We define parameters for the context builder, map stage, and reduce stage of Global Search. These parameters control aspects like context size, shuffling of reports, inclusion of community rankings and weights, and the LLM parameters for each stage.

```python
context_builder_params = {
    "use_community_summary": False,  # False means using full community reports. True means using community short summaries.
    "shuffle_data": True,
    "include_community_rank": True,
    "min_community_rank": 0,
    "community_rank_name": "rank",
    "include_community_weight": True,
    "community_weight_name": "occurrence weight",
    "normalize_community_weight": True,
    "max_tokens": 1000,  # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)
    "context_name": "Reports",
}

map_llm_params = {
    "max_tokens": 1000,
    "temperature": 0.0,
    "response_format": {"type": "json_object"},
}

reduce_llm_params = {
    "max_tokens": 2000,  # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 1000-1500)
    "temperature": 0.0,
}

search_engine = GlobalSearch(
    llm=llm,
    context_builder=context_builder,
    token_encoder=token_encoder,
    max_data_tokens=1000,  # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)
    map_llm_params=map_llm_params,
    reduce_llm_params=reduce_llm_params,
    allow_general_knowledge=False,  # set this to True will add instruction to encourage the LLM to incorporate general knowledge in the response, which may increase hallucinations, but could be useful in some use cases.
    json_mode=True,  # set this to False if your LLM model does not support JSON mode.
    context_builder_params=context_builder_params,
    concurrent_coroutines=32,
    response_type="multiple paragraphs",  # free form text describing the response type and format, can be anything, e.g. prioritized list, single paragraph, multiple paragraphs, multiple-page report
)
```
We then create the `GlobalSearch` engine, passing in the LLM instance, context builder, tokenizer, and other configuration parameters.

**4. Performing Global Search:**

We execute the Global Search using the `asearch` method, providing the query "Who has collaborated with Alice Johnson on any project?".

```python
result = await search_engine.asearch(
    "Who has collaborated with Alice Johnson on any project?"
)
print(result.response)
```

The search engine retrieves relevant community reports, extracts key points, aggregates them, and generates a final response based on the aggregated information.

To print the number of LLM calls and tokens:
```python
print(f"LLM calls: {result.llm_calls}. LLM tokens: {result.prompt_tokens}")
```
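Optionally, you can also inspect the context the engine assembled for this answer. The `context_data` attribute and its "reports" key follow the library's example notebooks for this version, so treat the exact keys as an assumption and verify them against your result object:

```python
# Inspect the context assembled for the answer; keys follow the graphrag example
# notebooks for this version (verify against your installed release).
print(result.context_data.keys())
print(result.context_data["reports"].head())
```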
### Local Search Example

**1. Importing Dependencies and Setting up the Environment:**

We begin by importing necessary libraries, including components from the `graphrag` library for handling various aspects of the query process. We set the stage for Local Search, which is ideal for questions requiring detailed information about particular entities.

```python
import os

import pandas as pd
import tiktoken

from graphrag.query.context_builder.entity_extraction import EntityVectorStoreKey
from graphrag.query.indexer_adapters import (
    read_indexer_covariates,
    read_indexer_entities,
    read_indexer_relationships,
    read_indexer_reports,
    read_indexer_text_units,
)
from graphrag.query.input.loaders.dfs import (
    store_entity_semantic_embeddings,
)
from graphrag.query.llm.oai.chat_openai import ChatOpenAI
from graphrag.query.llm.oai.embedding import OpenAIEmbedding
from graphrag.query.llm.oai.typing import OpenaiApiType
from graphrag.query.question_gen.local_gen import LocalQuestionGen
from graphrag.query.structured_search.local_search.mixed_context import (
    LocalSearchMixedContext,
)
from graphrag.query.structured_search.local_search.search import LocalSearch
from graphrag.vector_stores.lancedb import LanceDBVectorStore
```

**2. Loading Data and Building the Context:**

We load data from the indexing pipeline’s output, including entities, relationships, community reports, and text units. These data sources provide a comprehensive view of the indexed information.

```python
# Load tables to dataframes
INPUT_DIR = "./output/run-id"  # replace the run-id with the created one

LANCEDB_URI = f"{INPUT_DIR}/lancedb"

COMMUNITY_REPORT_TABLE = "artifacts/create_final_community_reports"
ENTITY_TABLE = "artifacts/create_final_nodes"
ENTITY_EMBEDDING_TABLE = "artifacts/create_final_entities"
RELATIONSHIP_TABLE = "artifacts/create_final_relationships"
COVARIATE_TABLE = "artifacts/create_final_covariates"
TEXT_UNIT_TABLE = "artifacts/create_final_text_units"
COMMUNITY_LEVEL = 2

# Read entities
# read nodes table to get community and degree data
entity_df = pd.read_parquet(f"{INPUT_DIR}/{ENTITY_TABLE}.parquet")
entity_embedding_df = pd.read_parquet(f"{INPUT_DIR}/{ENTITY_EMBEDDING_TABLE}.parquet")

entities = read_indexer_entities(entity_df, entity_embedding_df, COMMUNITY_LEVEL)

# load description embeddings to an in-memory lancedb vectorstore
# to connect to a remote db, specify url and port values.
description_embedding_store = LanceDBVectorStore(
    collection_name="entity_description_embeddings",
)
description_embedding_store.connect(db_uri=LANCEDB_URI)
entity_description_embeddings = store_entity_semantic_embeddings(
    entities=entities, vectorstore=description_embedding_store
)

# Read relationships
relationship_df = pd.read_parquet(f"{INPUT_DIR}/{RELATIONSHIP_TABLE}.parquet")
relationships = read_indexer_relationships(relationship_df)

# NOTE: covariates are turned off by default, because they generally need prompt tuning to be valuable
# Please see the GRAPHRAG_CLAIM_* settings
# covariate_df = pd.read_parquet(f"{INPUT_DIR}/{COVARIATE_TABLE}.parquet")
# claims = read_indexer_covariates(covariate_df)

report_df = pd.read_parquet(f"{INPUT_DIR}/{COMMUNITY_REPORT_TABLE}.parquet")
reports = read_indexer_reports(report_df, entity_df, COMMUNITY_LEVEL)

# Read text units
text_unit_df = pd.read_parquet(f"{INPUT_DIR}/{TEXT_UNIT_TABLE}.parquet")
text_units = read_indexer_text_units(text_unit_df)
```

We leverage **LanceDB**, a vector database, to store and efficiently retrieve entity embeddings, which are crucial for identifying entities related to the user’s query.
**3. Configuring the Embedding Model:**

We initialize an `OpenAIEmbedding` instance, configuring it to use the `bge-large` model hosted locally via Ollama. This model will be used to generate embeddings for text, allowing us to find semantically similar entities and text units during the search process.

```python
embedding_model = "bge-large:latest"
text_embedder = OpenAIEmbedding(
    api_key=api_key,
    api_base="http://127.0.0.1:11434/v1",
    api_type=OpenaiApiType.OpenAI,
    model=embedding_model,
    deployment_name=embedding_model,
    max_retries=20,
)
```

**4. Creating the Local Search Context Builder:**

We create a `LocalSearchMixedContext` instance, providing it with access to the loaded data (entities, relationships, reports, text units) and the embedding model.
This context builder is responsible for selecting and formatting relevant information from these sources based on the user's query.

```python
context_builder = LocalSearchMixedContext(
    community_reports=reports,
    text_units=text_units,
    entities=entities,
    relationships=relationships,
    # if you did not run covariates during indexing, set this to None
    covariates=None,
    entity_text_embeddings=description_embedding_store,
    embedding_vectorstore_key=EntityVectorStoreKey.ID,  # if the vectorstore uses entity title as ids, set this to EntityVectorStoreKey.TITLE
    text_embedder=text_embedder,
    token_encoder=token_encoder,
)
```

**5. Creating the Local Search Engine:**

We initialize the `LocalSearch` engine, providing the LLM, context builder, tokenizer, and specific parameters that control how the search is performed. These parameters determine factors like the proportion of the context window dedicated to different data types (e.g., text units vs. community reports), the number of related entities to retrieve, and the maximum context window size.

- `text_unit_prop`: proportion of the context window dedicated to related text units.
- `community_prop`: proportion of the context window dedicated to community reports. The remaining proportion is dedicated to entities and relationships. The sum of `text_unit_prop` and `community_prop` should be <= 1.
- `conversation_history_max_turns`: maximum number of turns to include in the conversation history.
- `conversation_history_user_turns_only`: if True, only include user queries in the conversation history.
- `top_k_mapped_entities`: number of related entities to retrieve from the entity description embedding store.
- `top_k_relationships`: controls the number of out-of-network relationships to pull into the context window.
- `include_entity_rank`: if True, include the entity rank in the entity table in the context window. Default entity rank = node degree.
- `include_relationship_weight`: if True, include the relationship weight in the context window.
- `include_community_rank`: if True, include the community rank in the context window.
- `return_candidate_context`: if True, return a set of dataframes containing all candidate entity/relationship/covariate records that could be relevant. Note that not all of these records will be included in the context window. The "in_context" column in these dataframes indicates whether the record is included in the context window.
- `max_tokens`: maximum number of tokens to use for the context window.

```python
local_context_params = {
    "text_unit_prop": 0.5,
    "community_prop": 0.1,
    "conversation_history_max_turns": 5,
    "conversation_history_user_turns_only": True,
    "top_k_mapped_entities": 10,
    "top_k_relationships": 10,
    "include_entity_rank": True,
    "include_relationship_weight": True,
    "include_community_rank": False,
    "return_candidate_context": False,
    "embedding_vectorstore_key": EntityVectorStoreKey.ID,  # set this to EntityVectorStoreKey.TITLE if the vectorstore uses entity title as ids
    "max_tokens": 12_000,  # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 5000)
}

llm_params = {
    "max_tokens": 2_000,  # change this based on the token limit you have on your model (if you are using a model with 8k limit, a good setting could be 1000-1500)
    "temperature": 0.0,
}

search_engine = LocalSearch(
    llm=llm,
    context_builder=context_builder,
    token_encoder=token_encoder,
    llm_params=llm_params,
    context_builder_params=local_context_params,
    response_type="multiple paragraphs",  # free form text describing the response type and format, can be anything, e.g. prioritized list, single paragraph, multiple paragraphs, multiple-page report
)
```
**6. Running Local Search:**

Finally, we execute the Local Search using the `asearch` method, providing the query "Tell me about Alice Johnson". The search engine identifies relevant entities (Alice Johnson in this case), retrieves related information from the various data sources, and generates a comprehensive response based on the combined context.

```python
question = "Tell me about Alice Johnson"
result = await search_engine.asearch(question)
print(result.response)
```
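The `await` call works as written inside a Jupyter notebook or any other environment that already runs an event loop. To run the same query from a plain Python script, a small wrapper along the lines of the sketch below (assuming the `search_engine` object built above) drives the coroutine with `asyncio`:

```python
import asyncio


async def ask(question: str) -> str:
    # Run the asynchronous Local Search end to end and return the generated answer.
    result = await search_engine.asearch(question)
    return result.response


if __name__ == "__main__":
    # asyncio.run creates and tears down the event loop for us in a plain script.
    print(asyncio.run(ask("Tell me about Alice Johnson")))
```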
GraphRAG represents a significant advancement in the field of Retrieval Augmented Generation. By leveraging knowledge graphs, it overcomes the limitations of traditional RAG methods and empowers LLMs to reason more effectively, understand complex datasets holistically, and provide more accurate and insightful answers to a wide range of questions. As research and development in this area continue, we can expect GraphRAG to play an increasingly important role in shaping the future of AI-powered knowledge retrieval and exploration.

**My LinkedIn**: https://www.linkedin.com/in/ayoub-kirouane3

**My HuggingFace**: https://huggingface.co/ayoubkirouane
Graph","normalizedTagSlug":"knowledge-graph"},"Post:45a57cc5c38b":{"__typename":"Post","id":"45a57cc5c38b","collection":{"__ref":"Collection:7adf3c3694ff"},"content({\"postMeteringOptions\":{}})":{"__typename":"PostContent","isLockedPreviewOnly":false,"bodyModel":{"__typename":"RichText","sections":[{"__typename":"Section","name":"5bcc","startIndex":0,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null},{"__typename":"Section","name":"7024","startIndex":91,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null},{"__typename":"Section","name":"5f54","startIndex":122,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null},{"__typename":"Section","name":"8621","startIndex":123,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null}],"paragraphs":[{"__ref":"Paragraph:403de6fde677_0"},{"__ref":"Paragraph:403de6fde677_1"},{"__ref":"Paragraph:403de6fde677_2"},{"__ref":"Paragraph:403de6fde677_3"},{"__ref":"Paragraph:403de6fde677_4"},{"__ref":"Paragraph:403de6fde677_5"},{"__ref":"Paragraph:403de6fde677_6"},{"__ref":"Paragraph:403de6fde677_7"},{"__ref":"Paragraph:403de6fde677_8"},{"__ref":"Paragraph:403de6fde677_9"},{"__ref":"Paragraph:403de6fde677_10"},{"__ref":"Paragraph:403de6fde677_11"},{"__ref":"Paragraph:403de6fde677_12"},{"__ref":"Paragraph:403de6fde677_13"},{"__ref":"Paragraph:403de6fde677_14"},{"__ref":"Paragraph:403de6fde677_15"},{"__ref":"Paragraph:403de6fde677_16"},{"__ref":"Paragraph:403de6fde677_17"},{"__ref":"Paragraph:403de6fde677_18"},{"__ref":"Paragraph:403de6fde677_19"},{"__ref":"Paragraph:403de6fde677_20"},{"__ref":"Paragraph:403de6fde677_21"},{"__ref":"Paragraph:403de6fde677_22"},{"__ref":"Paragraph:403de6fde677_23"},{"__ref":"Paragraph:403de6fde677_24"},{"__ref":"Paragraph:403de6fde677_25"},{"__ref":"Paragraph:403de6fde677_26"},{"__ref":"Paragraph:403de6fde677_27"},{"__ref":"Paragraph:403de6fde677_28"},{"__ref":"Paragraph:403de6fde677_29"},{"__ref":"Paragraph:403de6fde677_30"},{"__ref":"Paragraph:403de6fde677_31"},{"__ref":"Paragraph:403de6fde677_32"},{"__ref":"Paragraph:403de6fde677_33"},{"__ref":"Paragraph:403de6fde677_34"},{"__ref":"Paragraph:403de6fde677_35"},{"__ref":"Paragraph:403de6fde677_36"},{"__ref":"Paragraph:403de6fde677_37"},{"__ref":"Paragraph:403de6fde677_38"},{"__ref":"Paragraph:403de6fde677_39"},{"__ref":"Paragraph:403de6fde677_40"},{"__ref":"Paragraph:403de6fde677_41"},{"__ref":"Paragraph:403de6fde677_42"},{"__ref":"Paragraph:403de6fde677_43"},{"__ref":"Paragraph:403de6fde677_44"},{"__ref":"Paragraph:403de6fde677_45"},{"__ref":"Paragraph:403de6fde677_46"},{"__ref":"Paragraph:403de6fde677_47"},{"__ref":"Paragraph:403de6fde677_48"},{"__ref":"Paragraph:403de6fde677_49"},{"__ref":"Paragraph:403de6fde677_50"},{"__ref":"Paragraph:403de6fde677_51"},{"__ref":"Paragraph:403de6fde677_52"},{"__ref":"Paragraph:403de6fde677_53"},{"__ref":"Paragraph:403de6fde677_54"},{"__ref":"Paragraph:403de6fde677_55"},{"__ref":"Paragraph:403de6fde677_56"},{"__ref":"Paragraph:403de6fde677_57"},{"__ref":"Paragraph:403de6fde677_58"},{"__ref":"Paragraph:403de6fde677_59"},{"__ref":"Paragraph:403de6fde677_60"},{"__ref":"Paragraph:403de6fde677_61"},{"__ref":"Paragraph:403de6fde677_62"},{"__ref":"Paragraph:403de6fde677_63"},{"__ref":"Paragraph:403de6fde677_64"},{"__ref":"Paragraph:403de6fde677_65"},{"__ref":"Paragraph:403de6fde677_66"},{"__ref":"Paragraph:403de6fde677_67"},{"__ref":"Paragraph:403de6fde677_68"
},{"__ref":"Paragraph:403de6fde677_69"},{"__ref":"Paragraph:403de6fde677_70"},{"__ref":"Paragraph:403de6fde677_71"},{"__ref":"Paragraph:403de6fde677_72"},{"__ref":"Paragraph:403de6fde677_73"},{"__ref":"Paragraph:403de6fde677_74"},{"__ref":"Paragraph:403de6fde677_75"},{"__ref":"Paragraph:403de6fde677_76"},{"__ref":"Paragraph:403de6fde677_77"},{"__ref":"Paragraph:403de6fde677_78"},{"__ref":"Paragraph:403de6fde677_79"},{"__ref":"Paragraph:403de6fde677_80"},{"__ref":"Paragraph:403de6fde677_81"},{"__ref":"Paragraph:403de6fde677_82"},{"__ref":"Paragraph:403de6fde677_83"},{"__ref":"Paragraph:403de6fde677_84"},{"__ref":"Paragraph:403de6fde677_85"},{"__ref":"Paragraph:403de6fde677_86"},{"__ref":"Paragraph:403de6fde677_87"},{"__ref":"Paragraph:403de6fde677_88"},{"__ref":"Paragraph:403de6fde677_89"},{"__ref":"Paragraph:403de6fde677_90"},{"__ref":"Paragraph:403de6fde677_91"},{"__ref":"Paragraph:403de6fde677_92"},{"__ref":"Paragraph:403de6fde677_93"},{"__ref":"Paragraph:403de6fde677_94"},{"__ref":"Paragraph:403de6fde677_95"},{"__ref":"Paragraph:403de6fde677_96"},{"__ref":"Paragraph:403de6fde677_97"},{"__ref":"Paragraph:403de6fde677_98"},{"__ref":"Paragraph:403de6fde677_99"},{"__ref":"Paragraph:403de6fde677_100"},{"__ref":"Paragraph:403de6fde677_101"},{"__ref":"Paragraph:403de6fde677_102"},{"__ref":"Paragraph:403de6fde677_103"},{"__ref":"Paragraph:403de6fde677_104"},{"__ref":"Paragraph:403de6fde677_105"},{"__ref":"Paragraph:403de6fde677_106"},{"__ref":"Paragraph:403de6fde677_107"},{"__ref":"Paragraph:403de6fde677_108"},{"__ref":"Paragraph:403de6fde677_109"},{"__ref":"Paragraph:403de6fde677_110"},{"__ref":"Paragraph:403de6fde677_111"},{"__ref":"Paragraph:403de6fde677_112"},{"__ref":"Paragraph:403de6fde677_113"},{"__ref":"Paragraph:403de6fde677_114"},{"__ref":"Paragraph:403de6fde677_115"},{"__ref":"Paragraph:403de6fde677_116"},{"__ref":"Paragraph:403de6fde677_117"},{"__ref":"Paragraph:403de6fde677_118"},{"__ref":"Paragraph:403de6fde677_119"},{"__ref":"Paragraph:403de6fde677_120"},{"__ref":"Paragraph:403de6fde677_121"},{"__ref":"Paragraph:403de6fde677_122"},{"__ref":"Paragraph:403de6fde677_123"},{"__ref":"Paragraph:403de6fde677_124"}]},"validatedShareKey":"","shareKeyCreator":null},"creator":{"__ref":"User:4751fd7878c5"},"inResponseToEntityType":null,"isLocked":false,"isMarkedPaywallOnly":false,"lockedSource":"LOCKED_POST_SOURCE_NONE","mediumUrl":"https:\u002F\u002Fblog.gopenai.com\u002Fmicrosoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b","primaryTopic":null,"topics":[{"__typename":"Topic","slug":"data-science"},{"__typename":"Topic","slug":"programming"}],"isPublished":true,"latestPublishedVersion":"403de6fde677","visibility":"PUBLIC","postResponses":{"__typename":"PostResponses","count":0},"clapCount":218,"allowResponses":true,"isLimitedState":false,"title":"Microsoft GraphRAG and Ollama: Code Your Way to Smarter Question Answering","isSeries":false,"sequence":null,"uniqueSlug":"microsoft-graphrag-and-ollama-code-your-way-to-smarter-question-answering-45a57cc5c38b","socialTitle":"","socialDek":"","canonicalUrl":"","metaDescription":"","latestPublishedAt":1724936242719,"readingTime":14.186792452830188,"previewContent":{"__typename":"PreviewContent","subtitle":"Traditional RAG methods, which primarily rely on semantic similarity search, often fall short when faced with complex questions 
that…"},"previewImage":{"__ref":"ImageMetadata:1*ocprwBCYr-Vt9AHAhMaZuA.png"},"isShortform":false,"seoTitle":"","firstPublishedAt":1724915014517,"updatedAt":1731851126484,"shortformType":"SHORTFORM_TYPE_LINK","seoDescription":"","viewerEdge":{"__ref":"PostViewerEdge:postId:45a57cc5c38b-viewerId:lo_7e4af1ccdac9"},"isSuspended":false,"license":"ALL_RIGHTS_RESERVED","tags":[{"__ref":"Tag:graphrag"},{"__ref":"Tag:ollama"},{"__ref":"Tag:retrieval-augmented-gen"},{"__ref":"Tag:large-language-models"},{"__ref":"Tag:knowledge-graph"}],"isNewsletter":false,"statusForCollection":"APPROVED","pendingCollection":null,"detectedLanguage":"en","wordCount":3574,"layerCake":6,"responsesLocked":false}}</script><script src="https://cdn-client.medium.com/lite/static/js/manifest.aa9242f7.js"></script><script src="https://cdn-client.medium.com/lite/static/js/9865.1496d74a.js"></script><script src="https://cdn-client.medium.com/lite/static/js/main.e556b4ac.js"></script><script src="https://cdn-client.medium.com/lite/static/js/instrumentation.d9108df7.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/reporting.ff22a7a5.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/9120.5df29668.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5049.d1ead72d.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/4810.6318add7.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/6618.db187378.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2707.b0942613.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/9977.5b3eb23a.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8599.1ab63137.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5250.9f9e01d2.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5787.e66a3a4d.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2648.26563adf.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8393.826a25fb.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/3104.c3413b66.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/3735.afb7e926.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5642.8ad8a900.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/6546.cd03f950.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/6834.08de95de.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/7346.72622eb9.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2420.2a5e2d95.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/839.ca7937c2.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/7975.d195c6f1.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2106.21ff89d3.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/7394.094844de.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2961.00a48598.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8204.c4082863.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/4391.59acaed3.chunk.js"></script> <script 
src="https://cdn-client.medium.com/lite/static/js/PostPage.MainContent.1387c5dc.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8414.6565ad5f.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/3974.8d3e0217.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2527.a0afad8a.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/PostResponsesContent.36c2ecf4.chunk.js"></script><script>window.main();</script><script>(function(){function c(){var b=a.contentDocument||a.contentWindow.document;if(b){var d=b.createElement('script');d.innerHTML="window.__CF$cv$params={r:'8e95f0c5bc4181d4',t:'MTczMjc1MDU4OC4wMDAwMDA='};var a=document.createElement('script');a.nonce='';a.src='/cdn-cgi/challenge-platform/scripts/jsd/main.js';document.getElementsByTagName('head')[0].appendChild(a);";b.getElementsByTagName('head')[0].appendChild(d)}}if(document.body){var a=document.createElement('iframe');a.height=1;a.width=1;a.style.position='absolute';a.style.top=0;a.style.left=0;a.style.border='none';a.style.visibility='hidden';document.body.appendChild(a);if('loading'!==document.readyState)c();else if(window.addEventListener)document.addEventListener('DOMContentLoaded',c);else{var e=document.onreadystatechange||function(){};document.onreadystatechange=function(b){e(b);'loading'!==document.readyState&&(document.onreadystatechange=e,c())}}}})();</script></body></html>

Pages: 1 2 3 4 5 6 7 8 9 10