CINXE.COM

<!doctype html><html lang="en"><head><title data-rh="true">Building a robust GraphRAG System for a specific use case -Part Two- | by kirouane Ayoub | InfinitGraph | Medium</title><meta data-rh="true" charset="utf-8"/><meta data-rh="true" name="viewport" content="width=device-width,minimum-scale=1,initial-scale=1,maximum-scale=1"/><meta data-rh="true" name="theme-color" content="#000000"/><meta data-rh="true" name="twitter:app:name:iphone" content="Medium"/><meta data-rh="true" name="twitter:app:id:iphone" content="828256236"/><meta data-rh="true" property="al:ios:app_name" content="Medium"/><meta data-rh="true" property="al:ios:app_store_id" content="828256236"/><meta data-rh="true" property="al:android:package" content="com.medium.reader"/><meta data-rh="true" property="fb:app_id" content="542599432471018"/><meta data-rh="true" property="og:site_name" content="Medium"/><meta data-rh="true" property="og:type" content="article"/><meta data-rh="true" property="article:published_time" content="2024-09-11T09:16:29.641Z"/><meta data-rh="true" name="title" content="Building a robust GraphRAG System for a specific use case -Part Two- | by kirouane Ayoub | InfinitGraph | Medium"/><meta data-rh="true" property="og:title" content="Building a robust GraphRAG System for a specific use case -Part Two-"/><meta data-rh="true" property="al:android:url" content="medium://p/d48f58f8aefe"/><meta data-rh="true" property="al:ios:url" content="medium://p/d48f58f8aefe"/><meta data-rh="true" property="al:android:app_name" content="Medium"/><meta data-rh="true" name="description" content="In the first part of this series, we embarked on the crucial task of preparing a custom dataset for fine-tuning a large language model (LLM) for text-to-Cypher translation. We generated a diverse set…"/><meta data-rh="true" property="og:description" content="Part Two: The Fine-Tuning Process"/><meta data-rh="true" property="og:url" content="https://medium.com/infinitgraph/building-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe"/><meta data-rh="true" property="al:web:url" content="https://medium.com/infinitgraph/building-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe"/><meta data-rh="true" property="og:image" content="https://miro.medium.com/v2/resize:fit:1050/0*P6kJ1FqVWiSWEnRd.png"/><meta data-rh="true" property="article:author" content="https://medium.com/@ayoubkirouane3"/><meta data-rh="true" name="author" content="kirouane Ayoub"/><meta data-rh="true" name="robots" content="index,noarchive,follow,max-image-preview:large"/><meta data-rh="true" name="referrer" content="unsafe-url"/><meta data-rh="true" property="twitter:title" content="Building a robust GraphRAG System for a specific use case -Part Two-"/><meta data-rh="true" name="twitter:site" content="@Medium"/><meta data-rh="true" name="twitter:app:url:iphone" content="medium://p/d48f58f8aefe"/><meta data-rh="true" property="twitter:description" content="Part Two: The Fine-Tuning Process"/><meta data-rh="true" name="twitter:image:src" content="https://miro.medium.com/v2/resize:fit:1050/0*P6kJ1FqVWiSWEnRd.png"/><meta data-rh="true" name="twitter:card" content="summary_large_image"/><meta data-rh="true" name="twitter:label1" content="Reading time"/><meta data-rh="true" name="twitter:data1" content="8 min read"/><link data-rh="true" rel="icon" href="https://miro.medium.com/v2/5d8de952517e8160e40ef9841c781cdc14a5db313057fa3c3de41c6f5b494b19"/><link data-rh="true" rel="search" type="application/opensearchdescription+xml" title="Medium" href="/osd.xml"/><link data-rh="true" rel="apple-touch-icon" sizes="152x152" href="https://miro.medium.com/v2/resize:fill:304:304/10fd5c419ac61637245384e7099e131627900034828f4f386bdaa47a74eae156"/><link data-rh="true" rel="apple-touch-icon" sizes="120x120" href="https://miro.medium.com/v2/resize:fill:240:240/10fd5c419ac61637245384e7099e131627900034828f4f386bdaa47a74eae156"/><link data-rh="true" rel="apple-touch-icon" sizes="76x76" href="https://miro.medium.com/v2/resize:fill:152:152/10fd5c419ac61637245384e7099e131627900034828f4f386bdaa47a74eae156"/><link data-rh="true" rel="apple-touch-icon" sizes="60x60" href="https://miro.medium.com/v2/resize:fill:120:120/10fd5c419ac61637245384e7099e131627900034828f4f386bdaa47a74eae156"/><link data-rh="true" rel="mask-icon" href="https://miro.medium.com/v2/resize:fill:1000:1000/7*GAOKVe--MXbEJmV9230oOQ.png" color="#171717"/><link data-rh="true" rel="preconnect" href="https://glyph.medium.com" crossOrigin=""/><link data-rh="true" id="glyph_preload_link" rel="preload" as="style" type="text/css" href="https://glyph.medium.com/css/unbound.css"/><link data-rh="true" id="glyph_link" rel="stylesheet" type="text/css" href="https://glyph.medium.com/css/unbound.css"/><link data-rh="true" rel="author" href="https://medium.com/@ayoubkirouane3"/><link data-rh="true" rel="canonical" href="https://medium.com/infinitgraph/building-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe"/><link data-rh="true" rel="alternate" href="android-app://com.medium.reader/https/medium.com/p/d48f58f8aefe"/><script data-rh="true" type="application/ld+json">{"@context":"http:\u002F\u002Fschema.org","@type":"NewsArticle","image":["https:\u002F\u002Fmiro.medium.com\u002Fv2\u002Fresize:fit:1200\u002F0*P6kJ1FqVWiSWEnRd.png"],"url":"https:\u002F\u002Fmedium.com\u002Finfinitgraph\u002Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe","dateCreated":"2024-09-11T07:46:06.542Z","datePublished":"2024-09-11T07:46:06.542Z","dateModified":"2024-11-16T19:36:09.394Z","headline":"Building a robust GraphRAG System for a specific use case -Part Two-","name":"Building a robust GraphRAG System for a specific use case -Part Two-","description":"In the first part of this series, we embarked on the crucial task of preparing a custom dataset for fine-tuning a large language model (LLM) for text-to-Cypher translation. We generated a diverse set…","identifier":"d48f58f8aefe","author":{"@type":"Person","name":"kirouane Ayoub","url":"https:\u002F\u002Fmedium.com\u002F@ayoubkirouane3"},"creator":["kirouane Ayoub"],"publisher":{"@type":"Organization","name":"InfinitGraph","url":"https:\u002F\u002Fmedium.com\u002Finfinitgraph","logo":{"@type":"ImageObject","width":94,"height":60,"url":"https:\u002F\u002Fmiro.medium.com\u002Fv2\u002Fresize:fit:188\u002F1*sSrYGmX4voK-tio0-dNYRg.jpeg"}},"mainEntityOfPage":"https:\u002F\u002Fmedium.com\u002Finfinitgraph\u002Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe"}</script><style type="text/css" data-fela-rehydration="546" data-fela-type="STATIC">html{box-sizing:border-box;-webkit-text-size-adjust:100%}*, *:before, *:after{box-sizing:inherit}body{margin:0;padding:0;text-rendering:optimizeLegibility;-webkit-font-smoothing:antialiased;color:rgba(0,0,0,0.8);position:relative;min-height:100vh}h1, h2, h3, h4, h5, h6, dl, dd, ol, ul, menu, figure, blockquote, p, pre, form{margin:0}menu, ol, ul{padding:0;list-style:none;list-style-image:none}main{display:block}a{color:inherit;text-decoration:none}a, button, input{-webkit-tap-highlight-color:transparent}img, svg{vertical-align:middle}button{background:transparent;overflow:visible}button, input, optgroup, select, textarea{margin:0}:root{--reach-tabs:1;--reach-menu-button:1}#speechify-root{font-family:Sohne, sans-serif}div[data-popper-reference-hidden="true"]{visibility:hidden;pointer-events:none}.grecaptcha-badge{visibility:hidden} /*XCode style (c) Angel Garcia <angelgarcia.mail@gmail.com>*/.hljs {background: #fff;color: black; }/* Gray DOCTYPE selectors like WebKit */ .xml .hljs-meta {color: #c0c0c0; }.hljs-comment, .hljs-quote {color: #007400; }.hljs-tag, .hljs-attribute, .hljs-keyword, .hljs-selector-tag, .hljs-literal, .hljs-name {color: #aa0d91; }.hljs-variable, .hljs-template-variable {color: #3F6E74; }.hljs-code, .hljs-string, .hljs-meta .hljs-string {color: #c41a16; }.hljs-regexp, .hljs-link {color: #0E0EFF; }.hljs-title, .hljs-symbol, .hljs-bullet, .hljs-number {color: #1c00cf; }.hljs-section, .hljs-meta {color: #643820; }.hljs-title.class_, .hljs-class .hljs-title, .hljs-type, .hljs-built_in, .hljs-params {color: #5c2699; }.hljs-attr {color: #836C28; }.hljs-subst {color: #000; }.hljs-formula {background-color: #eee;font-style: italic; }.hljs-addition {background-color: #baeeba; }.hljs-deletion {background-color: #ffc8bd; }.hljs-selector-id, .hljs-selector-class {color: #9b703f; }.hljs-doctag, .hljs-strong {font-weight: bold; }.hljs-emphasis {font-style: italic; } </style><style type="text/css" data-fela-rehydration="546" data-fela-type="KEYFRAME">@-webkit-keyframes k1{0%{opacity:0.8}50%{opacity:0.5}100%{opacity:0.8}}@-moz-keyframes k1{0%{opacity:0.8}50%{opacity:0.5}100%{opacity:0.8}}@keyframes k1{0%{opacity:0.8}50%{opacity:0.5}100%{opacity:0.8}}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE">.a{font-family:medium-content-sans-serif-font, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Open Sans", "Helvetica Neue", sans-serif}.b{font-weight:400}.c{background-color:rgba(255, 255, 255, 1)}.l{display:block}.m{position:sticky}.n{top:0}.o{z-index:500}.p{padding:0 24px}.q{align-items:center}.r{border-bottom:solid 1px #F2F2F2}.y{height:41px}.z{line-height:20px}.ab{display:flex}.ac{height:57px}.ae{flex:1 0 auto}.af{color:inherit}.ag{fill:inherit}.ah{font-size:inherit}.ai{border:inherit}.aj{font-family:inherit}.ak{letter-spacing:inherit}.al{font-weight:inherit}.am{padding:0}.an{margin:0}.ao{cursor:pointer}.ap:disabled{cursor:not-allowed}.aq:disabled{color:#6B6B6B}.ar:disabled{fill:#6B6B6B}.au{width:auto}.av path{fill:#242424}.aw{height:25px}.ax{margin-left:16px}.ay{border:none}.az{border-radius:20px}.ba{width:240px}.bb{background:#F9F9F9}.bc path{fill:#6B6B6B}.be{outline:none}.bf{font-family:sohne, "Helvetica Neue", Helvetica, Arial, sans-serif}.bg{font-size:14px}.bh{width:100%}.bi{padding:10px 20px 10px 0}.bj{background-color:transparent}.bk{color:#242424}.bl::placeholder{color:#6B6B6B}.bm{display:inline-block}.bn{margin-left:12px}.bo{margin-right:12px}.bp{border-radius:4px}.bq{margin-left:24px}.br{height:24px}.bx{background-color:#F9F9F9}.by{border-radius:50%}.bz{height:32px}.ca{width:32px}.cb{justify-content:center}.ch{max-width:680px}.ci{min-width:0}.cj{animation:k1 1.2s ease-in-out infinite}.ck{height:100vh}.cl{margin-bottom:16px}.cm{margin-top:48px}.cn{align-items:flex-start}.co{flex-direction:column}.cp{justify-content:space-between}.cq{margin-bottom:24px}.cw{width:80%}.cx{background-color:#F2F2F2}.dd{height:44px}.de{width:44px}.df{margin:auto 0}.dg{margin-bottom:4px}.dh{height:16px}.di{width:120px}.dj{width:80px}.dp{margin-bottom:8px}.dq{width:96%}.dr{width:98%}.ds{width:81%}.dt{margin-left:8px}.du{color:#6B6B6B}.dv{font-size:13px}.dw{height:100%}.ep{color:#FFFFFF}.eq{fill:#FFFFFF}.er{background:rgba(26, 137, 23, 1)}.es{border-color:rgba(26, 137, 23, 1)}.ew:disabled{cursor:inherit !important}.ex:disabled{opacity:0.3}.ey:disabled:hover{background:rgba(26, 137, 23, 1)}.ez:disabled:hover{border-color:rgba(26, 137, 23, 1)}.fa{border-radius:99em}.fb{border-width:1px}.fc{border-style:solid}.fd{box-sizing:border-box}.fe{text-decoration:none}.ff{text-align:center}.fi{margin-right:32px}.fj{position:relative}.fk{fill:#6B6B6B}.fn{background:transparent}.fo svg{margin-left:4px}.fp svg{fill:#6B6B6B}.fr{box-shadow:inset 0 0 0 1px rgba(0, 0, 0, 0.05)}.fs{position:absolute}.fz{margin:0 24px}.gd{background:rgba(255, 255, 255, 1)}.ge{border:1px solid #F2F2F2}.gf{box-shadow:0 1px 4px #F2F2F2}.gg{max-height:100vh}.gh{overflow-y:auto}.gi{left:0}.gj{top:calc(100vh + 100px)}.gk{bottom:calc(100vh + 100px)}.gl{width:10px}.gm{pointer-events:none}.gn{word-break:break-word}.go{word-wrap:break-word}.gp:after{display:block}.gq:after{content:""}.gr:after{clear:both}.gs{line-height:1.23}.gt{letter-spacing:0}.gu{font-style:normal}.gv{font-weight:700}.ia{align-items:baseline}.ib{width:48px}.ic{height:48px}.id{border:2px solid rgba(255, 255, 255, 1)}.ie{z-index:0}.if{box-shadow:none}.ig{border:1px solid rgba(0, 0, 0, 0.05)}.ih{margin-left:-12px}.ii{width:28px}.ij{height:28px}.ik{z-index:1}.il{width:24px}.im{margin-bottom:2px}.in{flex-wrap:nowrap}.io{font-size:16px}.ip{line-height:24px}.ir{margin:0 8px}.is{display:inline}.it{color:rgba(26, 137, 23, 1)}.iu{fill:rgba(26, 137, 23, 1)}.ix{flex:0 0 auto}.ja{flex-wrap:wrap}.jd{white-space:pre-wrap}.je{margin-right:4px}.jf{overflow:hidden}.jg{max-height:20px}.jh{text-overflow:ellipsis}.ji{display:-webkit-box}.jj{-webkit-line-clamp:1}.jk{-webkit-box-orient:vertical}.jl{word-break:break-all}.jn{padding-left:8px}.jo{padding-right:8px}.kp> *{flex-shrink:0}.kq{overflow-x:scroll}.kr::-webkit-scrollbar{display:none}.ks{scrollbar-width:none}.kt{-ms-overflow-style:none}.ku{width:74px}.kv{flex-direction:row}.kw{z-index:2}.kz{-webkit-user-select:none}.la{border:0}.lb{fill:rgba(117, 117, 117, 1)}.le{outline:0}.lf{user-select:none}.lg> svg{pointer-events:none}.lp{cursor:progress}.lq{opacity:1}.lr{padding:4px 0}.lu{margin-top:0px}.lv{width:16px}.lx{display:inline-flex}.md{max-width:100%}.me{padding:8px 2px}.mf svg{color:#6B6B6B}.mw{line-height:1.12}.mx{letter-spacing:-0.022em}.my{font-weight:600}.nt{margin-bottom:-0.28em}.nu{line-height:1.58}.nv{letter-spacing:-0.004em}.nw{font-family:source-serif-pro, Georgia, Cambria, "Times New Roman", Times, serif}.or{margin-bottom:-0.46em}.os{text-decoration:underline}.ot{line-height:1.18}.ph{margin-bottom:-0.31em}.pi{margin-left:auto}.pj{margin-right:auto}.pk{max-width:1050px}.pq{clear:both}.ps{cursor:zoom-in}.pt{z-index:auto}.pv{height:auto}.qb{max-width:898px}.qc{overflow-x:auto}.qd{font-family:source-code-pro, Menlo, Monaco, "Courier New", Courier, monospace}.qe{padding:32px}.qf{border:1px solid #E5E5E5}.qg{line-height:1.4}.qh{margin-top:-0.2em}.qi{margin-bottom:-0.2em}.qj{white-space:pre}.qk{min-width:fit-content}.ql{padding:2px 4px}.qm{font-size:75%}.qn> strong{font-family:inherit}.qo{margin-top:16px}.qp{margin-top:32px}.qq{margin-bottom:14px}.qr{padding-top:24px}.qs{padding-bottom:10px}.qt{background-color:#000000}.qu{height:3px}.qv{width:3px}.qw{margin-right:20px}.qx{border-top:none}.qy{height:52px}.qz{max-height:52px}.ra{box-sizing:content-box}.rb{position:static}.rd{max-width:155px}.ro{height:0px}.rp{margin-bottom:40px}.rq{margin-bottom:48px}.se{border-radius:2px}.sg{height:64px}.sh{width:64px}.si{align-self:flex-end}.sj{flex:1 1 auto}.sp{padding-right:4px}.sq{font-weight:500}.sx{margin-top:8px}.sy{color:rgba(255, 255, 255, 1)}.sz{padding:8px 16px}.ta{fill:rgba(255, 255, 255, 1)}.tb{background:rgba(25, 25, 25, 1)}.tc{border-color:rgba(25, 25, 25, 1)}.tf:disabled{opacity:0.1}.tg:disabled:hover{background:rgba(25, 25, 25, 1)}.th:disabled:hover{border-color:rgba(25, 25, 25, 1)}.ti{margin-bottom:54px}.to{gap:18px}.tp{fill:rgba(61, 61, 61, 1)}.tw{border-bottom:solid 1px #E5E5E5}.tx{margin-top:72px}.ty{padding:24px 0}.tz{margin-bottom:0px}.ua{margin-right:16px}.as:hover:not(:disabled){color:rgba(25, 25, 25, 1)}.at:hover:not(:disabled){fill:rgba(25, 25, 25, 1)}.et:hover{background:rgba(15, 115, 12, 1)}.eu:hover{border-color:rgba(15, 115, 12, 1)}.ev:hover{cursor:pointer}.fl:hover{color:#242424}.fm:hover{fill:#242424}.fq:hover svg{fill:#242424}.ft:hover{background-color:rgba(0, 0, 0, 0.1)}.iq:hover{text-decoration:underline}.iv:hover:not(:disabled){color:rgba(15, 115, 12, 1)}.iw:hover:not(:disabled){fill:rgba(15, 115, 12, 1)}.ld:hover{fill:rgba(8, 8, 8, 1)}.ls:hover{fill:#000000}.lt:hover p{color:#000000}.lw:hover{color:#000000}.mg:hover svg{color:#000000}.sf:hover{background-color:none}.td:hover{background:#000000}.te:hover{border-color:#242424}.tq:hover{fill:rgba(25, 25, 25, 1)}.bd:focus-within path{fill:#242424}.lc:focus{fill:rgba(8, 8, 8, 1)}.mh:focus svg{color:#000000}.pu:focus{transform:scale(1.01)}.lh:active{border-style:none}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (min-width: 1080px)">.d{display:none}.bw{width:64px}.cg{margin:0 64px}.cv{height:48px}.dc{margin-bottom:52px}.do{margin-bottom:48px}.ef{font-size:14px}.eg{line-height:20px}.em{font-size:13px}.eo{padding:5px 12px}.fh{display:flex}.fy{margin-bottom:68px}.gc{max-width:680px}.hq{font-size:42px}.hr{margin-top:1.19em}.hs{margin-bottom:32px}.ht{line-height:52px}.hu{letter-spacing:-0.011em}.hz{align-items:center}.kb{border-top:solid 1px #F2F2F2}.kc{border-bottom:solid 1px #F2F2F2}.kd{margin:32px 0 0}.ke{padding:3px 8px}.kn> *{margin-right:24px}.ko> :last-child{margin-right:0}.lo{margin-top:0px}.mc{margin:0}.np{font-size:24px}.nq{margin-top:1.95em}.nr{line-height:30px}.ns{letter-spacing:-0.016em}.on{font-size:20px}.oo{margin-top:0.94em}.op{line-height:32px}.oq{letter-spacing:-0.003em}.pe{margin-top:1.72em}.pf{line-height:24px}.pg{letter-spacing:0}.pp{margin-top:56px}.qa{margin-top:2.14em}.ri{display:inline-block}.rn{margin-bottom:104px}.rr{flex-direction:row}.ru{margin-bottom:0}.rv{margin-right:20px}.sk{max-width:500px}.tn{margin-bottom:72px}.tv{padding-top:72px}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (max-width: 1079.98px)">.e{display:none}.ln{margin-top:0px}.rh{display:inline-block}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (max-width: 903.98px)">.f{display:none}.lm{margin-top:0px}.rg{display:inline-block}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (max-width: 727.98px)">.g{display:none}.lk{margin-top:0px}.ll{margin-right:0px}.rf{display:inline-block}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (max-width: 551.98px)">.h{display:none}.s{display:flex}.t{justify-content:space-between}.bs{width:24px}.cc{margin:0 24px}.cr{height:40px}.cy{margin-bottom:44px}.dk{margin-bottom:32px}.dx{font-size:13px}.dy{line-height:20px}.eh{padding:0px 8px 1px}.fu{margin-bottom:4px}.gw{font-size:32px}.gx{margin-top:1.01em}.gy{margin-bottom:24px}.gz{line-height:38px}.ha{letter-spacing:-0.014em}.hv{align-items:flex-start}.iy{flex-direction:column}.jb{margin-bottom:2px}.jp{margin:24px -24px 0}.jq{padding:0}.kf> *{margin-right:8px}.kg> :last-child{margin-right:24px}.kx{margin-left:0px}.li{margin-top:0px}.lj{margin-right:0px}.ly{margin:0}.mi{border:1px solid #F2F2F2}.mj{border-radius:99em}.mk{padding:0px 16px 0px 12px}.ml{height:38px}.mm{align-items:center}.mo svg{margin-right:8px}.mz{font-size:20px}.na{margin-top:1.2em}.nb{line-height:24px}.nc{letter-spacing:0}.nx{font-size:18px}.ny{margin-top:0.67em}.nz{line-height:28px}.oa{letter-spacing:-0.003em}.ou{font-size:16px}.ov{margin-top:1.23em}.pl{margin-top:40px}.pw{margin-top:1.56em}.re{display:inline-block}.rj{margin-bottom:96px}.sc{margin-bottom:20px}.sd{margin-right:0}.so{max-width:100%}.sr{font-size:24px}.ss{line-height:30px}.st{letter-spacing:-0.016em}.tj{margin-bottom:64px}.tr{padding-top:48px}.mn:hover{border-color:#E5E5E5}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (min-width: 904px) and (max-width: 1079.98px)">.i{display:none}.bv{width:64px}.cf{margin:0 64px}.cu{height:48px}.db{margin-bottom:52px}.dn{margin-bottom:48px}.ed{font-size:14px}.ee{line-height:20px}.ek{font-size:13px}.el{padding:5px 12px}.fg{display:flex}.fx{margin-bottom:68px}.gb{max-width:680px}.hl{font-size:42px}.hm{margin-top:1.19em}.hn{margin-bottom:32px}.ho{line-height:52px}.hp{letter-spacing:-0.011em}.hy{align-items:center}.jx{border-top:solid 1px #F2F2F2}.jy{border-bottom:solid 1px #F2F2F2}.jz{margin:32px 0 0}.ka{padding:3px 8px}.kl> *{margin-right:24px}.km> :last-child{margin-right:0}.mb{margin:0}.nl{font-size:24px}.nm{margin-top:1.95em}.nn{line-height:30px}.no{letter-spacing:-0.016em}.oj{font-size:20px}.ok{margin-top:0.94em}.ol{line-height:32px}.om{letter-spacing:-0.003em}.pb{margin-top:1.72em}.pc{line-height:24px}.pd{letter-spacing:0}.po{margin-top:56px}.pz{margin-top:2.14em}.rm{margin-bottom:104px}.rs{flex-direction:row}.rw{margin-bottom:0}.rx{margin-right:20px}.sl{max-width:500px}.tm{margin-bottom:72px}.tu{padding-top:72px}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (min-width: 728px) and (max-width: 903.98px)">.j{display:none}.w{display:flex}.x{justify-content:space-between}.bu{width:64px}.ce{margin:0 48px}.ct{height:48px}.da{margin-bottom:52px}.dm{margin-bottom:48px}.eb{font-size:13px}.ec{line-height:20px}.ej{padding:0px 8px 1px}.fw{margin-bottom:68px}.ga{max-width:680px}.hg{font-size:42px}.hh{margin-top:1.19em}.hi{margin-bottom:32px}.hj{line-height:52px}.hk{letter-spacing:-0.011em}.hx{align-items:center}.jt{border-top:solid 1px #F2F2F2}.ju{border-bottom:solid 1px #F2F2F2}.jv{margin:32px 0 0}.jw{padding:3px 8px}.kj> *{margin-right:24px}.kk> :last-child{margin-right:0}.ma{margin:0}.nh{font-size:24px}.ni{margin-top:1.95em}.nj{line-height:30px}.nk{letter-spacing:-0.016em}.of{font-size:20px}.og{margin-top:0.94em}.oh{line-height:32px}.oi{letter-spacing:-0.003em}.oy{margin-top:1.72em}.oz{line-height:24px}.pa{letter-spacing:0}.pn{margin-top:56px}.py{margin-top:2.14em}.rl{margin-bottom:104px}.rt{flex-direction:row}.ry{margin-bottom:0}.rz{margin-right:20px}.sm{max-width:500px}.tl{margin-bottom:72px}.tt{padding-top:72px}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="all and (min-width: 552px) and (max-width: 727.98px)">.k{display:none}.u{display:flex}.v{justify-content:space-between}.bt{width:24px}.cd{margin:0 24px}.cs{height:40px}.cz{margin-bottom:44px}.dl{margin-bottom:32px}.dz{font-size:13px}.ea{line-height:20px}.ei{padding:0px 8px 1px}.fv{margin-bottom:4px}.hb{font-size:32px}.hc{margin-top:1.01em}.hd{margin-bottom:24px}.he{line-height:38px}.hf{letter-spacing:-0.014em}.hw{align-items:flex-start}.iz{flex-direction:column}.jc{margin-bottom:2px}.jr{margin:24px 0 0}.js{padding:0}.kh> *{margin-right:8px}.ki> :last-child{margin-right:8px}.ky{margin-left:0px}.lz{margin:0}.mp{border:1px solid #F2F2F2}.mq{border-radius:99em}.mr{padding:0px 16px 0px 12px}.ms{height:38px}.mt{align-items:center}.mv svg{margin-right:8px}.nd{font-size:20px}.ne{margin-top:1.2em}.nf{line-height:24px}.ng{letter-spacing:0}.ob{font-size:18px}.oc{margin-top:0.67em}.od{line-height:28px}.oe{letter-spacing:-0.003em}.ow{font-size:16px}.ox{margin-top:1.23em}.pm{margin-top:40px}.px{margin-top:1.56em}.rk{margin-bottom:96px}.sa{margin-bottom:20px}.sb{margin-right:0}.sn{max-width:100%}.su{font-size:24px}.sv{line-height:30px}.sw{letter-spacing:-0.016em}.tk{margin-bottom:64px}.ts{padding-top:48px}.mu:hover{border-color:#E5E5E5}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="print">.rc{display:none}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="(orientation: landscape) and (max-width: 903.98px)">.jm{max-height:none}</style><style type="text/css" data-fela-rehydration="546" data-fela-type="RULE" media="(prefers-reduced-motion: no-preference)">.pr{transition:transform 300ms cubic-bezier(0.2, 0, 0.2, 1)}</style></head><body><div id="root"><div class="a b c"><div class="d e f g h i j k"></div><script>document.domain = document.domain;</script><div class="l c"><div class="l m n o c"><div class="p q r s t u v w x i d y z"><a class="du ag dv bf ak b am an ao ap aq ar as at s u w i d q dw z" href="https://rsci.app.link/?%24canonical_url=https%3A%2F%2Fmedium.com%2Fp%2Fd48f58f8aefe&amp;%7Efeature=LoOpenInAppButton&amp;%7Echannel=ShowPostUnderCollection&amp;source=---top_nav_layout_nav----------------------------------" rel="noopener follow">Open in app<svg xmlns="http://www.w3.org/2000/svg" width="10" height="10" fill="none" viewBox="0 0 10 10" class="dt"><path fill="currentColor" d="M.985 8.485a.375.375 0 1 0 .53.53zM8.75 1.25h.375A.375.375 0 0 0 8.75.875zM8.375 6.5a.375.375 0 1 0 .75 0zM3.5.875a.375.375 0 1 0 0 .75zm-1.985 8.14 7.5-7.5-.53-.53-7.5 7.5zm6.86-7.765V6.5h.75V1.25zM3.5 1.625h5.25v-.75H3.5z"></path></svg></a><div class="ab q"><p class="bf b dx dy dz ea eb ec ed ee ef eg du"><span><button class="bf b dx dy eh dz ea ei eb ec ej ek ee el em eg eo ep eq er es et eu ev ew ex ey ez fa fb fc fd bm fe ff" data-testid="headerSignUpButton">Sign up</button></span></p><div class="ax l"><p class="bf b dx dy dz ea eb ec ed ee ef eg du"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerSignInButton" rel="noopener follow" href="/m/signin?operation=login&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;source=post_page---top_nav_layout_nav-----------------------global_nav-----------">Sign in</a></span></p></div></div></div><div class="p q r ab ac"><div class="ab q ae"><a class="af ag ah ai aj ak al am an ao ap aq ar as at ab" aria-label="Homepage" data-testid="headerMediumLogo" rel="noopener follow" href="/?source=---top_nav_layout_nav----------------------------------"><svg xmlns="http://www.w3.org/2000/svg" width="719" height="160" fill="none" viewBox="0 0 719 160" class="au av aw"><path fill="#242424" d="m174.104 9.734.215-.047V8.02H130.39L89.6 103.89 48.81 8.021H1.472v1.666l.212.047c8.018 1.81 12.09 4.509 12.09 14.242V137.93c0 9.734-4.087 12.433-12.106 14.243l-.212.047v1.671h32.118v-1.665l-.213-.048c-8.018-1.809-12.089-4.509-12.089-14.242V30.586l52.399 123.305h2.972l53.925-126.743V140.75c-.687 7.688-4.721 10.062-11.982 11.701l-.215.05v1.652h55.948v-1.652l-.215-.05c-7.269-1.639-11.4-4.013-12.087-11.701l-.037-116.774h.037c0-9.733 4.071-12.432 12.087-14.242m25.555 75.488c.915-20.474 8.268-35.252 20.606-35.507 3.806.063 6.998 1.312 9.479 3.714 5.272 5.118 7.751 15.812 7.368 31.793zm-.553 5.77h65.573v-.275c-.186-15.656-4.721-27.834-13.466-36.196-7.559-7.227-18.751-11.203-30.507-11.203h-.263c-6.101 0-13.584 1.48-18.909 4.16-6.061 2.807-11.407 7.003-15.855 12.511-7.161 8.874-11.499 20.866-12.554 34.343q-.05.606-.092 1.212a50 50 0 0 0-.065 1.151 85.807 85.807 0 0 0-.094 5.689c.71 30.524 17.198 54.917 46.483 54.917 25.705 0 40.675-18.791 44.407-44.013l-1.886-.664c-6.557 13.556-18.334 21.771-31.738 20.769-18.297-1.369-32.314-19.922-31.042-42.395m139.722 41.359c-2.151 5.101-6.639 7.908-12.653 7.908s-11.513-4.129-15.418-11.63c-4.197-8.053-6.405-19.436-6.405-32.92 0-28.067 8.729-46.22 22.24-46.22 5.657 0 10.111 2.807 12.236 7.704zm43.499 20.008c-8.019-1.897-12.089-4.722-12.089-14.951V1.309l-48.716 14.353v1.757l.299-.024c6.72-.543 11.278.386 13.925 2.83 2.072 1.915 3.082 4.853 3.082 8.987v18.66c-4.803-3.067-10.516-4.56-17.448-4.56-14.059 0-26.909 5.92-36.176 16.672-9.66 11.205-14.767 26.518-14.767 44.278-.003 31.72 15.612 53.039 38.851 53.039 13.595 0 24.533-7.449 29.54-20.013v16.865h43.711v-1.746zM424.1 19.819c0-9.904-7.468-17.374-17.375-17.374-9.859 0-17.573 7.632-17.573 17.374s7.721 17.374 17.573 17.374c9.907 0 17.375-7.47 17.375-17.374m11.499 132.546c-8.019-1.897-12.089-4.722-12.089-14.951h-.035V43.635l-43.714 12.551v1.705l.263.024c9.458.842 12.047 4.1 12.047 15.152v81.086h43.751v-1.746zm112.013 0c-8.018-1.897-12.089-4.722-12.089-14.951V43.635l-41.621 12.137v1.71l.246.026c7.733.813 9.967 4.257 9.967 15.36v59.279c-2.578 5.102-7.415 8.131-13.274 8.336-9.503 0-14.736-6.419-14.736-18.073V43.638l-43.714 12.55v1.703l.262.024c9.459.84 12.05 4.097 12.05 15.152v50.17a56.3 56.3 0 0 0 .91 10.444l.787 3.423c3.701 13.262 13.398 20.197 28.59 20.197 12.868 0 24.147-7.966 29.115-20.43v17.311h43.714v-1.747zm169.818 1.788v-1.749l-.213-.05c-8.7-2.006-12.089-5.789-12.089-13.49v-63.79c0-19.89-11.171-31.761-29.883-31.761-13.64 0-25.141 7.882-29.569 20.16-3.517-13.01-13.639-20.16-28.606-20.16-13.146 0-23.449 6.938-27.869 18.657V43.643L545.487 55.68v1.715l.263.024c9.345.829 12.047 4.181 12.047 14.95v81.784h40.787v-1.746l-.215-.053c-6.941-1.631-9.181-4.606-9.181-12.239V66.998c1.836-4.289 5.537-9.37 12.853-9.37 9.086 0 13.692 6.296 13.692 18.697v77.828h40.797v-1.746l-.215-.053c-6.94-1.631-9.18-4.606-9.18-12.239V75.066a42 42 0 0 0-.578-7.26c1.947-4.661 5.86-10.177 13.475-10.177 9.214 0 13.691 6.114 13.691 18.696v77.828z"></path></svg></a><div class="ax h"><div class="ab ay az ba bb q bc bd"><div class="bm" aria-hidden="false" aria-describedby="searchResults" aria-labelledby="searchResults"></div><div class="bn bo ab"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M4.092 11.06a6.95 6.95 0 1 1 13.9 0 6.95 6.95 0 0 1-13.9 0m6.95-8.05a8.05 8.05 0 1 0 5.13 14.26l3.75 3.75a.56.56 0 1 0 .79-.79l-3.73-3.73A8.05 8.05 0 0 0 11.042 3z" clip-rule="evenodd"></path></svg></div><input role="combobox" aria-controls="searchResults" aria-expanded="false" aria-label="search" data-testid="headerSearchInput" tabindex="0" class="ay be bf bg z bh bi bj bk bl" placeholder="Search" value=""/></div></div></div><div class="h k w fg fh"><div class="fi ab"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerWriteButton" rel="noopener follow" href="/m/signin?operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Fnew-story&amp;source=---top_nav_layout_nav-----------------------new_post_topnav-----------"><div class="bf b bg z du fj fk ab q fl fm"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24" aria-label="Write"><path fill="currentColor" d="M14 4a.5.5 0 0 0 0-1zm7 6a.5.5 0 0 0-1 0zm-7-7H4v1h10zM3 4v16h1V4zm1 17h16v-1H4zm17-1V10h-1v10zm-1 1a1 1 0 0 0 1-1h-1zM3 20a1 1 0 0 0 1 1v-1zM4 3a1 1 0 0 0-1 1h1z"></path><path stroke="currentColor" d="m17.5 4.5-8.458 8.458a.25.25 0 0 0-.06.098l-.824 2.47a.25.25 0 0 0 .316.316l2.47-.823a.25.25 0 0 0 .098-.06L19.5 6.5m-2-2 2.323-2.323a.25.25 0 0 1 .354 0l1.646 1.646a.25.25 0 0 1 0 .354L19.5 6.5m-2-2 2 2"></path></svg><div class="dt l">Write</div></div></a></span></div></div><div class="k j i d"><div class="fi ab"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerSearchButton" rel="noopener follow" href="/search?source=---top_nav_layout_nav----------------------------------"><div class="bf b bg z du fj fk ab q fl fm"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24" aria-label="Search"><path fill="currentColor" fill-rule="evenodd" d="M4.092 11.06a6.95 6.95 0 1 1 13.9 0 6.95 6.95 0 0 1-13.9 0m6.95-8.05a8.05 8.05 0 1 0 5.13 14.26l3.75 3.75a.56.56 0 1 0 .79-.79l-3.73-3.73A8.05 8.05 0 0 0 11.042 3z" clip-rule="evenodd"></path></svg></div></a></div></div><div class="fi h k j"><div class="ab q"><p class="bf b dx dy dz ea eb ec ed ee ef eg du"><span><button class="bf b dx dy eh dz ea ei eb ec ej ek ee el em eg eo ep eq er es et eu ev ew ex ey ez fa fb fc fd bm fe ff" data-testid="headerSignUpButton">Sign up</button></span></p><div class="ax l"><p class="bf b dx dy dz ea eb ec ed ee ef eg du"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerSignInButton" rel="noopener follow" href="/m/signin?operation=login&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;source=post_page---top_nav_layout_nav-----------------------global_nav-----------">Sign in</a></span></p></div></div></div><div class="l" aria-hidden="false"><button class="ay fn am ab q ao fo fp fq" aria-label="user options menu" data-testid="headerUserIcon"><div class="l fj"><img alt="" class="l fd by bz ca cx" src="https://miro.medium.com/v2/resize:fill:64:64/1*dmbNkD5D-u45r44go_cf0g.png" width="32" height="32" loading="lazy" role="presentation"/><div class="fr by l bz ca fs n ay ft"></div></div></button></div></div></div><div class="l"><div class="fu fv fw fx fy l"><div class="ab cb"><div class="ci bh fz ga gb gc"></div></div><article><div class="l"><div class="l"><span class="l"></span><section><div><div class="fs gi gj gk gl gm"></div><div class="gn go gp gq gr"><div class="ab cb"><div class="ci bh fz ga gb gc"><div><h1 id="e40b" class="pw-post-title gs gt gu bf gv gw gx gy gz ha hb hc hd he hf hg hh hi hj hk hl hm hn ho hp hq hr hs ht hu bk" data-testid="storyTitle">Building a robust GraphRAG System for a specific use case -Part Two-</h1><div><div class="speechify-ignore ab cp"><div class="speechify-ignore bh l"><div class="hv hw hx hy hz ab"><div><div class="ab ia"><div><div class="bm" aria-hidden="false"><a rel="noopener follow" href="/@ayoubkirouane3?source=post_page---byline--d48f58f8aefe--------------------------------"><div class="l ib ic by id ie"><div class="l fj"><img alt="kirouane Ayoub" class="l fd by dd de cx" src="https://miro.medium.com/v2/resize:fill:88:88/1*T-KWhmfASlLM3XMvRKZnWA.jpeg" width="44" height="44" loading="lazy" data-testid="authorPhoto"/><div class="if by l dd de fs n ig ft"></div></div></div></a></div></div><div class="ih ab fj"><div><div class="bm" aria-hidden="false"><a href="https://medium.com/infinitgraph?source=post_page---byline--d48f58f8aefe--------------------------------" rel="noopener follow"><div class="l ii ij by id ik"><div class="l fj"><img alt="InfinitGraph" class="l fd by br il cx" src="https://miro.medium.com/v2/resize:fill:48:48/1*sSrYGmX4voK-tio0-dNYRg.jpeg" width="24" height="24" loading="lazy" data-testid="publicationPhoto"/><div class="if by l br il fs n ig ft"></div></div></div></a></div></div></div></div></div><div class="bn bh l"><div class="ab"><div style="flex:1"><span class="bf b bg z bk"><div class="im ab q"><div class="ab q in"><div class="ab q"><div><div class="bm" aria-hidden="false"><p class="bf b io ip bk"><a class="af ag ah ai aj ak al am an ao ap aq ar iq" data-testid="authorName" rel="noopener follow" href="/@ayoubkirouane3?source=post_page---byline--d48f58f8aefe--------------------------------">kirouane Ayoub</a></p></div></div></div><span class="ir is" aria-hidden="true"><span class="bf b bg z du">·</span></span><p class="bf b io ip du"><span><a class="it iu ah ai aj ak al am an ao ap aq ar ex iv iw" rel="noopener follow" href="/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fsubscribe%2Fuser%2F4751fd7878c5&amp;operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;user=kirouane+Ayoub&amp;userId=4751fd7878c5&amp;source=post_page-4751fd7878c5--byline--d48f58f8aefe---------------------post_header-----------">Follow</a></span></p></div></div></span></div></div><div class="l ix"><span class="bf b bg z du"><div class="ab cn iy iz ja"><div class="jb jc ab"><div class="bf b bg z du ab jd"><span class="je l ix">Published in</span><div><div class="l" aria-hidden="false"><a class="af ag ah ai aj ak al am an ao ap aq ar iq ab q" data-testid="publicationName" href="https://medium.com/infinitgraph?source=post_page---byline--d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b bg z jf jg jh ji jj jk jl jm bk">InfinitGraph</p></a></div></div></div><div class="h k"><span class="ir is" aria-hidden="true"><span class="bf b bg z du">·</span></span></div></div><span class="bf b bg z du"><div class="ab ae"><span data-testid="storyReadTime">8 min read</span><div class="jn jo l" aria-hidden="true"><span class="l" aria-hidden="true"><span class="bf b bg z du">·</span></span></div><span data-testid="storyPublishDate">Sep 11, 2024</span></div></span></div></span></div></div></div><div class="ab cp jp jq jr js jt ju jv jw jx jy jz ka kb kc kd ke"><div class="h k w fg fh q"><div class="ku l"><div class="ab q kv kw"><div class="pw-multi-vote-icon fj je kx ky kz"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerClapButton" rel="noopener follow" href="/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fvote%2Finfinitgraph%2Fd48f58f8aefe&amp;operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;user=kirouane+Ayoub&amp;userId=4751fd7878c5&amp;source=---header_actions--d48f58f8aefe---------------------clap_footer-----------"><div><div class="bm" aria-hidden="false"><div class="la ao lb lc ld le am lf lg lh kz"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" aria-label="clap"><path fill-rule="evenodd" d="M11.37.828 12 3.282l.63-2.454zM13.916 3.953l1.523-2.112-1.184-.39zM8.589 1.84l1.522 2.112-.337-2.501zM18.523 18.92c-.86.86-1.75 1.246-2.62 1.33a6 6 0 0 0 .407-.372c2.388-2.389 2.86-4.951 1.399-7.623l-.912-1.603-.79-1.672c-.26-.56-.194-.98.203-1.288a.7.7 0 0 1 .546-.132c.283.046.546.231.728.5l2.363 4.157c.976 1.624 1.141 4.237-1.324 6.702m-10.999-.438L3.37 14.328a.828.828 0 0 1 .585-1.408.83.83 0 0 1 .585.242l2.158 2.157a.365.365 0 0 0 .516-.516l-2.157-2.158-1.449-1.449a.826.826 0 0 1 1.167-1.17l3.438 3.44a.363.363 0 0 0 .516 0 .364.364 0 0 0 0-.516L5.293 9.513l-.97-.97a.826.826 0 0 1 0-1.166.84.84 0 0 1 1.167 0l.97.968 3.437 3.436a.36.36 0 0 0 .517 0 .366.366 0 0 0 0-.516L6.977 7.83a.82.82 0 0 1-.241-.584.82.82 0 0 1 .824-.826c.219 0 .43.087.584.242l5.787 5.787a.366.366 0 0 0 .587-.415l-1.117-2.363c-.26-.56-.194-.98.204-1.289a.7.7 0 0 1 .546-.132c.283.046.545.232.727.501l2.193 3.86c1.302 2.38.883 4.59-1.277 6.75-1.156 1.156-2.602 1.627-4.19 1.367-1.418-.236-2.866-1.033-4.079-2.246M10.75 5.971l2.12 2.12c-.41.502-.465 1.17-.128 1.89l.22.465-3.523-3.523a.8.8 0 0 1-.097-.368c0-.22.086-.428.241-.584a.847.847 0 0 1 1.167 0m7.355 1.705c-.31-.461-.746-.758-1.23-.837a1.44 1.44 0 0 0-1.11.275c-.312.24-.505.543-.59.881a1.74 1.74 0 0 0-.906-.465 1.47 1.47 0 0 0-.82.106l-2.182-2.182a1.56 1.56 0 0 0-2.2 0 1.54 1.54 0 0 0-.396.701 1.56 1.56 0 0 0-2.21-.01 1.55 1.55 0 0 0-.416.753c-.624-.624-1.649-.624-2.237-.037a1.557 1.557 0 0 0 0 2.2c-.239.1-.501.238-.715.453a1.56 1.56 0 0 0 0 2.2l.516.515a1.556 1.556 0 0 0-.753 2.615L7.01 19c1.32 1.319 2.909 2.189 4.475 2.449q.482.08.971.08c.85 0 1.653-.198 2.393-.579.231.033.46.054.686.054 1.266 0 2.457-.52 3.505-1.567 2.763-2.763 2.552-5.734 1.439-7.586z" clip-rule="evenodd"></path></svg></div></div></div></a></span></div><div class="pw-multi-vote-count l li lj lk ll lm ln lo"><p class="bf b dv z du"><span class="lp">--</span></p></div></div></div><div><div class="bm" aria-hidden="false"><button class="ao la lq lr ab q fk ls lt" aria-label="responses"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" class="lu"><path d="M18.006 16.803c1.533-1.456 2.234-3.325 2.234-5.321C20.24 7.357 16.709 4 12.191 4S4 7.357 4 11.482c0 4.126 3.674 7.482 8.191 7.482.817 0 1.622-.111 2.393-.327.231.2.48.391.744.559 1.06.693 2.203 1.044 3.399 1.044.224-.008.4-.112.486-.287a.49.49 0 0 0-.042-.518c-.495-.67-.845-1.364-1.04-2.057a4 4 0 0 1-.125-.598zm-3.122 1.055-.067-.223-.315.096a8 8 0 0 1-2.311.338c-4.023 0-7.292-2.955-7.292-6.587 0-3.633 3.269-6.588 7.292-6.588 4.014 0 7.112 2.958 7.112 6.593 0 1.794-.608 3.469-2.027 4.72l-.195.168v.255c0 .056 0 .151.016.295.025.231.081.478.154.733.154.558.398 1.117.722 1.659a5.3 5.3 0 0 1-2.165-.845c-.276-.176-.714-.383-.941-.59z"></path></svg></button></div></div></div><div class="ab q kf kg kh ki kj kk kl km kn ko kp kq kr ks kt"><div class="lv k j i d"></div><div class="h k"><div><div class="bm" aria-hidden="false"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="headerBookmarkButton" rel="noopener follow" href="/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fbookmark%2Fp%2Fd48f58f8aefe&amp;operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;source=---header_actions--d48f58f8aefe---------------------bookmark_footer-----------"><svg xmlns="http://www.w3.org/2000/svg" width="25" height="25" fill="none" viewBox="0 0 25 25" class="du lw" aria-label="Add to list bookmark button"><path fill="currentColor" d="M18 2.5a.5.5 0 0 1 1 0V5h2.5a.5.5 0 0 1 0 1H19v2.5a.5.5 0 1 1-1 0V6h-2.5a.5.5 0 0 1 0-1H18zM7 7a1 1 0 0 1 1-1h3.5a.5.5 0 0 0 0-1H8a2 2 0 0 0-2 2v14a.5.5 0 0 0 .805.396L12.5 17l5.695 4.396A.5.5 0 0 0 19 21v-8.5a.5.5 0 0 0-1 0v7.485l-5.195-4.012a.5.5 0 0 0-.61 0L7 19.985z"></path></svg></a></span></div></div></div><div class="fd lx cn"><div class="l ae"><div class="ab cb"><div class="ly lz ma mb mc md ci bh"><div class="ab"><div class="bm bh" aria-hidden="false"><div><div class="bm" aria-hidden="false"><button aria-label="Listen" data-testid="audioPlayButton" class="af fk ah ai aj ak al me an ao ap ex mf mg lt mh mi mj mk ml s mm mn mo mp mq mr ms u mt mu mv"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M3 12a9 9 0 1 1 18 0 9 9 0 0 1-18 0m9-10C6.477 2 2 6.477 2 12s4.477 10 10 10 10-4.477 10-10S17.523 2 12 2m3.376 10.416-4.599 3.066a.5.5 0 0 1-.777-.416V8.934a.5.5 0 0 1 .777-.416l4.599 3.066a.5.5 0 0 1 0 .832" clip-rule="evenodd"></path></svg><div class="j i d"><p class="bf b bg z du">Listen</p></div></button></div></div></div></div></div></div></div></div><div class="bm" aria-hidden="false" aria-describedby="postFooterSocialMenu" aria-labelledby="postFooterSocialMenu"><div><div class="bm" aria-hidden="false"><button aria-controls="postFooterSocialMenu" aria-expanded="false" aria-label="Share Post" data-testid="headerSocialShareButton" class="af fk ah ai aj ak al me an ao ap ex mf mg lt mh mi mj mk ml s mm mn mo mp mq mr ms u mt mu mv"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M15.218 4.931a.4.4 0 0 1-.118.132l.012.006a.45.45 0 0 1-.292.074.5.5 0 0 1-.3-.13l-2.02-2.02v7.07c0 .28-.23.5-.5.5s-.5-.22-.5-.5v-7.04l-2 2a.45.45 0 0 1-.57.04h-.02a.4.4 0 0 1-.16-.3.4.4 0 0 1 .1-.32l2.8-2.8a.5.5 0 0 1 .7 0l2.8 2.79a.42.42 0 0 1 .068.498m-.106.138.008.004v-.01zM16 7.063h1.5a2 2 0 0 1 2 2v10a2 2 0 0 1-2 2h-11c-1.1 0-2-.9-2-2v-10a2 2 0 0 1 2-2H8a.5.5 0 0 1 .35.15.5.5 0 0 1 .15.35.5.5 0 0 1-.15.35.5.5 0 0 1-.35.15H6.4c-.5 0-.9.4-.9.9v10.2a.9.9 0 0 0 .9.9h11.2c.5 0 .9-.4.9-.9v-10.2c0-.5-.4-.9-.9-.9H16a.5.5 0 0 1 0-1" clip-rule="evenodd"></path></svg><div class="j i d"><p class="bf b bg z du">Share</p></div></button></div></div></div></div></div></div></div></div></div><h1 id="4ffc" class="mw mx gu bf my mz na nb nc nd ne nf ng nh ni nj nk nl nm nn no np nq nr ns nt bk">Part Two: The Fine-Tuning Process</h1><p id="4960" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">In the <a class="af os" rel="noopener" href="/infinitgraph/building-a-robust-graphrag-system-for-specific-use-case-part-one-0db34b6eea75">first part</a> of this series, we embarked on the crucial task of preparing a custom dataset for fine-tuning a large language model (LLM) for text-to-Cypher translation. We generated a diverse set of questions tailored to our specific Neo4j graph database schema and then leveraged another LLM to translate these questions into their corresponding Cypher queries. This process resulted in a high-quality dataset of question-Cypher pairs, which will serve as the foundation for fine-tuning our target LLM in this part.</p><h2 id="4255" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Fine-Tuning Llama 3.1 with QLoRA</h2><p id="589b" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Our objective in this part is to fine-tune a <a class="af os" href="https://huggingface.co/meta-llama/Meta-Llama-3.1-8B-Instruct" rel="noopener ugc nofollow" target="_blank">Llama 3.1 8b</a> model using the dataset generated in the previous step. To achieve this efficiently, we will leverage one of the Parameter-Efficient Fine-Tuning (PEFT) methods called QLoRA (Quantized Low-Rank Adaptation), implemented using the Unsloth framework.</p><h2 id="771f" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">PEFT (Parameter-Efficient Fine-Tuning)</h2><figure class="pl pm pn po pp pq pi pj paragraph-image"><div role="button" tabindex="0" class="pr ps fj pt bh pu"><div class="pi pj pk"><picture><source srcSet="https://miro.medium.com/v2/resize:fit:640/format:webp/0*P6kJ1FqVWiSWEnRd.png 640w, https://miro.medium.com/v2/resize:fit:720/format:webp/0*P6kJ1FqVWiSWEnRd.png 720w, https://miro.medium.com/v2/resize:fit:750/format:webp/0*P6kJ1FqVWiSWEnRd.png 750w, https://miro.medium.com/v2/resize:fit:786/format:webp/0*P6kJ1FqVWiSWEnRd.png 786w, https://miro.medium.com/v2/resize:fit:828/format:webp/0*P6kJ1FqVWiSWEnRd.png 828w, https://miro.medium.com/v2/resize:fit:1100/format:webp/0*P6kJ1FqVWiSWEnRd.png 1100w, https://miro.medium.com/v2/resize:fit:1400/format:webp/0*P6kJ1FqVWiSWEnRd.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px" type="image/webp"/><source data-testid="og" srcSet="https://miro.medium.com/v2/resize:fit:640/0*P6kJ1FqVWiSWEnRd.png 640w, https://miro.medium.com/v2/resize:fit:720/0*P6kJ1FqVWiSWEnRd.png 720w, https://miro.medium.com/v2/resize:fit:750/0*P6kJ1FqVWiSWEnRd.png 750w, https://miro.medium.com/v2/resize:fit:786/0*P6kJ1FqVWiSWEnRd.png 786w, https://miro.medium.com/v2/resize:fit:828/0*P6kJ1FqVWiSWEnRd.png 828w, https://miro.medium.com/v2/resize:fit:1100/0*P6kJ1FqVWiSWEnRd.png 1100w, https://miro.medium.com/v2/resize:fit:1400/0*P6kJ1FqVWiSWEnRd.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px"/><img alt="" class="bh md pv c" width="700" height="325" loading="lazy" role="presentation"/></picture></div></div></figure><p id="7498" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk"><a class="af os" href="https://www.leewayhertz.com/parameter-efficient-fine-tuning" rel="noopener ugc nofollow" target="_blank">Parameter-Efficient Fine-Tuning (PEFT)</a> techniques address the challenge of fine-tuning large language models, which can be computationally expensive and require significant memory resources. Instead of updating all the parameters of a pre-trained LLM, PEFT methods modify only a small subset of parameters, typically those that have the most significant impact on the target task. This approach drastically reduces the computational burden and memory footprint of the fine-tuning process, making it feasible to fine-tune large LLMs even on resource-constrained hardware.</p><h2 id="22e0" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">QLoRA (Quantized Low-Rank Adaptation)</h2><figure class="pl pm pn po pp pq pi pj paragraph-image"><div role="button" tabindex="0" class="pr ps fj pt bh pu"><div class="pi pj pk"><picture><source srcSet="https://miro.medium.com/v2/resize:fit:640/format:webp/0*lsSpKinoKZ-QwRes.png 640w, https://miro.medium.com/v2/resize:fit:720/format:webp/0*lsSpKinoKZ-QwRes.png 720w, https://miro.medium.com/v2/resize:fit:750/format:webp/0*lsSpKinoKZ-QwRes.png 750w, https://miro.medium.com/v2/resize:fit:786/format:webp/0*lsSpKinoKZ-QwRes.png 786w, https://miro.medium.com/v2/resize:fit:828/format:webp/0*lsSpKinoKZ-QwRes.png 828w, https://miro.medium.com/v2/resize:fit:1100/format:webp/0*lsSpKinoKZ-QwRes.png 1100w, https://miro.medium.com/v2/resize:fit:1400/format:webp/0*lsSpKinoKZ-QwRes.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px" type="image/webp"/><source data-testid="og" srcSet="https://miro.medium.com/v2/resize:fit:640/0*lsSpKinoKZ-QwRes.png 640w, https://miro.medium.com/v2/resize:fit:720/0*lsSpKinoKZ-QwRes.png 720w, https://miro.medium.com/v2/resize:fit:750/0*lsSpKinoKZ-QwRes.png 750w, https://miro.medium.com/v2/resize:fit:786/0*lsSpKinoKZ-QwRes.png 786w, https://miro.medium.com/v2/resize:fit:828/0*lsSpKinoKZ-QwRes.png 828w, https://miro.medium.com/v2/resize:fit:1100/0*lsSpKinoKZ-QwRes.png 1100w, https://miro.medium.com/v2/resize:fit:1400/0*lsSpKinoKZ-QwRes.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px"/><img alt="" class="bh md pv c" width="700" height="364" loading="lazy" role="presentation"/></picture></div></div></figure><p id="4b66" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk"><a class="af os" href="https://arxiv.org/abs/2305.14314" rel="noopener ugc nofollow" target="_blank">QLoRA</a> is a particularly efficient PEFT technique that combines the benefits of <a class="af os" href="https://arxiv.org/abs/2106.09685" rel="noopener ugc nofollow" target="_blank">Low-Rank Adaptation (LoRA)</a> with quantization. LoRA fine-tunes models by adding small, low-rank matrices to the existing layers, effectively injecting task-specific knowledge without modifying the original model’s weights. QLoRA further enhances this approach by applying <strong class="nw gv">4-bit quantization</strong> to the pre-trained model’s weights, drastically reducing memory consumption while maintaining performance comparable to full 16-bit fine-tuning. This combination of techniques allows for fine-tuning large LLMs, such as <strong class="nw gv">Llama 3.1</strong>, on relatively modest hardware, even a single GPU with limited memory.</p><h2 id="6ba0" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Unsloth Framework</h2><figure class="pl pm pn po pp pq pi pj paragraph-image"><div role="button" tabindex="0" class="pr ps fj pt bh pu"><div class="pi pj qb"><picture><source srcSet="https://miro.medium.com/v2/resize:fit:640/format:webp/0*Sa48aUfZwjgKAJda.png 640w, https://miro.medium.com/v2/resize:fit:720/format:webp/0*Sa48aUfZwjgKAJda.png 720w, https://miro.medium.com/v2/resize:fit:750/format:webp/0*Sa48aUfZwjgKAJda.png 750w, https://miro.medium.com/v2/resize:fit:786/format:webp/0*Sa48aUfZwjgKAJda.png 786w, https://miro.medium.com/v2/resize:fit:828/format:webp/0*Sa48aUfZwjgKAJda.png 828w, https://miro.medium.com/v2/resize:fit:1100/format:webp/0*Sa48aUfZwjgKAJda.png 1100w, https://miro.medium.com/v2/resize:fit:1400/format:webp/0*Sa48aUfZwjgKAJda.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px" type="image/webp"/><source data-testid="og" srcSet="https://miro.medium.com/v2/resize:fit:640/0*Sa48aUfZwjgKAJda.png 640w, https://miro.medium.com/v2/resize:fit:720/0*Sa48aUfZwjgKAJda.png 720w, https://miro.medium.com/v2/resize:fit:750/0*Sa48aUfZwjgKAJda.png 750w, https://miro.medium.com/v2/resize:fit:786/0*Sa48aUfZwjgKAJda.png 786w, https://miro.medium.com/v2/resize:fit:828/0*Sa48aUfZwjgKAJda.png 828w, https://miro.medium.com/v2/resize:fit:1100/0*Sa48aUfZwjgKAJda.png 1100w, https://miro.medium.com/v2/resize:fit:1400/0*Sa48aUfZwjgKAJda.png 1400w" sizes="(min-resolution: 4dppx) and (max-width: 700px) 50vw, (-webkit-min-device-pixel-ratio: 4) and (max-width: 700px) 50vw, (min-resolution: 3dppx) and (max-width: 700px) 67vw, (-webkit-min-device-pixel-ratio: 3) and (max-width: 700px) 65vw, (min-resolution: 2.5dppx) and (max-width: 700px) 80vw, (-webkit-min-device-pixel-ratio: 2.5) and (max-width: 700px) 80vw, (min-resolution: 2dppx) and (max-width: 700px) 100vw, (-webkit-min-device-pixel-ratio: 2) and (max-width: 700px) 100vw, 700px"/><img alt="" class="bh md pv c" width="700" height="246" loading="lazy" role="presentation"/></picture></div></div></figure><p id="351e" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">The <a class="af os" href="https://docs.unsloth.ai/" rel="noopener ugc nofollow" target="_blank">Unsloth</a> framework is an open-source solution designed to streamline and simplify the fine-tuning and training of LLMs like Llama 3, Mistral, and Gemma. Developed by a team with experience at NVIDIA, Unsloth focuses on making the fine-tuning process faster, more efficient, and less resource-intensive. It achieves this by incorporating advanced techniques like LoRA and quantization, providing a user-friendly interface, and offering seamless integration with popular tools like Google Colab.</p><p id="6e1e" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">Unsloth’s primary goal is to democratize the creation of custom AI models, enabling developers to efficiently build and deploy models tailored to specific needs, regardless of their computational resources. By utilizing Unsloth, we can leverage the power of QLoRA to fine-tune our Llama 3.1 model for text-to-Cypher translation effectively and efficiently.</p><h2 id="5d0a" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Installing Dependencies</h2><p id="a08d" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we install the necessary packages for fine-tuning.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="2410" class="qg mx gu qd b bg qh qi l qj qk"># Installs Unsloth, Xformers (Flash Attention) and all other packages!<br/>!pip install &quot;unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git&quot;<br/><br/># We have to check which Torch version for Xformers (2.3 -&gt; 0.0.27)<br/>from torch import __version__; from packaging.version import Version as V<br/>xformers = &quot;xformers==0.0.27&quot; if V(__version__) &lt; V(&quot;2.4.0&quot;) else &quot;xformers&quot;<br/>!pip install --no-deps {xformers} trl peft accelerate bitsandbytes triton</span></pre><h2 id="d4d0" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Loading the Model</h2><p id="4948" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we load the pre-trained Llama 3.1 8B model and its tokenizer using the <code class="cx ql qm qn qd b">FastLanguageModel</code> class from Unsloth. We specify the maximum sequence length (<code class="cx ql qm qn qd b">max_seq_length</code>) to restrict the model&#x27;s context window, which impacts both compute and memory usage.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="285e" class="qg mx gu qd b bg qh qi l qj qk">from unsloth import FastLanguageModel<br/>import torch<br/>max_seq_length = 2048 # to restricts its context window, most of model support larger, but make sure to use it based on your need since it will consumes more compute and VRAM<br/>dtype = None # None for auto detection. Float16 for Tesla T4, V100, Bfloat16 for Ampere+<br/>load_in_4bit = True # Use 4bit quantization to reduce memory usage. Can be False.<br/><br/>model, tokenizer = FastLanguageModel.from_pretrained(<br/> model_name = &quot;unsloth/Meta-Llama-3.1-8B&quot;,<br/> max_seq_length = max_seq_length,<br/> dtype = dtype,<br/> load_in_4bit = load_in_4bit,<br/>)</span></pre><p id="a75d" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">We set the data type (<code class="cx ql qm qn qd b">dtype</code>) to <code class="cx ql qm qn qd b">None</code> for automatic detection, or we can explicitly choose <code class="cx ql qm qn qd b">float16</code> for older GPUs or <code class="cx ql qm qn qd b">bfloat16</code> for newer Ampere GPUs. We enable 4-bit quantization (<code class="cx ql qm qn qd b">load_in_4bit</code>) to reduce memory usage during fine-tuning.</p><h2 id="c0fc" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Configuring the Model for Training</h2><p id="19ea" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we prepare the loaded model for PEFT using QLoRA. We use the <code class="cx ql qm qn qd b">get_peft_model</code> function to apply LoRA to specific layers of the model. We define the rank (<code class="cx ql qm qn qd b">r</code>) of the LoRA matrices, which determines the number of trainable parameters. Higher ranks can store more information but increase computational and memory costs.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="399f" class="qg mx gu qd b bg qh qi l qj qk">model = FastLanguageModel.get_peft_model(<br/> model,<br/> r = 16, # Suggested 8, 16, 32, 64, 128, higher ranks can store more information but increase the computational and memory cost of LoRA.<br/> target_modules = [&quot;q_proj&quot;, &quot;k_proj&quot;, &quot;v_proj&quot;, &quot;o_proj&quot;,<br/> &quot;gate_proj&quot;, &quot;up_proj&quot;, &quot;down_proj&quot;,],<br/> lora_alpha = 16, # rule of thumb, double the r or equal. alpha is a scaling factor for updates<br/> lora_dropout = 0, # Supports any, but = 0 is optimized. Probability of zeroing out elements in low-rank matrices for regularization.<br/> random_state = 3407,<br/> use_rslora = True, # has been proven to work better ( https://arxiv.org/pdf/2312.03732 )<br/>)</span></pre><p id="daf4" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">We specify the <code class="cx ql qm qn qd b">target_modules</code>, which are the layers where LoRA will be applied. We set <code class="cx ql qm qn qd b">lora_alpha</code> (scaling factor for updates), <code class="cx ql qm qn qd b">lora_dropout</code> (probability of dropout for regularization), and <code class="cx ql qm qn qd b">random_state</code> for reproducibility. We also enable <code class="cx ql qm qn qd b">use_rslora</code>, which has been shown to improve performance.</p><h2 id="0bdd" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Instruction Tuning Prompt Formatter</h2><p id="ee75" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we define the prompt format and a function for preparing training data. We use the Alpaca format, which consists of an instruction, input, and response. We customize the instruction to guide the model to convert text to Cypher queries based on the provided graph schema.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="ab2d" class="qg mx gu qd b bg qh qi l qj qk">prompt = &quot;&quot;&quot;Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.<br/><br/>### Instruction:<br/>{}<br/><br/>### Input:<br/>{}<br/><br/>### Response:<br/>{}&quot;&quot;&quot;<br/><br/>EOS_TOKEN = tokenizer.eos_token # Must add EOS_TOKEN<br/>def formatting_prompts_func(examples):<br/> instructions = f&quot;Convert text to cypher query based on this schema: {graph.schema}&quot; # examples[&quot;instructions&quot;]<br/> inputs = examples[&quot;input&quot;]<br/> outputs = examples[&quot;output&quot;]<br/> texts = []<br/> for input, output in zip(inputs, outputs):<br/> # Must add EOS_TOKEN, otherwise your generation will go on forever!<br/> text = prompt.format(instructions, input, output) + EOS_TOKEN<br/> texts.append(text)<br/> return { &quot;text&quot; : texts, }<br/>pass</span></pre><p id="74d7" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">The <code class="cx ql qm qn qd b">formatting_prompts_func</code> takes a dataset of examples and formats them according to the Alpaca prompt template, adding the end-of-sequence token (<code class="cx ql qm qn qd b">EOS_TOKEN</code>) to ensure proper termination of the generated sequences.</p><h2 id="1705" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Loading and Preparing the Dataset</h2><p id="24c4" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we load the dataset generated in the first part, filter out rows with syntax errors or timeouts, and rename the columns to match the expected format for the fine-tuning process.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="5226" class="qg mx gu qd b bg qh qi l qj qk">import pandas as pd<br/><br/>df = pd.read_csv(&#x27;final_text2cypher.csv&#x27;)<br/>df = df[(df[&#x27;syntax_error&#x27;] == False) &amp; (df[&#x27;timeout&#x27;] == False)]<br/>df = df[[&#x27;question&#x27;,&#x27;cypher&#x27;]]<br/>df.rename(columns={&#x27;question&#x27;: &#x27;input&#x27;,&#x27;cypher&#x27;:&#x27;output&#x27;}, inplace=True)<br/>df.reset_index(drop=True, inplace=True)</span></pre><p id="6ac5" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">We then convert the Pandas DataFrame into a Hugging Face <code class="cx ql qm qn qd b">Dataset</code> object and apply the <code class="cx ql qm qn qd b">formatting_prompts_func</code> to format the examples according to the Alpaca prompt template.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="045e" class="qg mx gu qd b bg qh qi l qj qk">from datasets import Dataset<br/>dataset = Dataset.from_pandas(df)<br/>dataset = dataset.map(formatting_prompts_func, batched = True)</span></pre><h2 id="56d9" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Creating the Supervised Fine-Tuning Trainer</h2><p id="3b1d" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we create the <a class="af os" href="https://huggingface.co/docs/trl/en/sft_trainer" rel="noopener ugc nofollow" target="_blank"><strong class="nw gv">SFTTrainer</strong></a> from the <a class="af os" href="https://huggingface.co/docs/trl/en/index" rel="noopener ugc nofollow" target="_blank">TRL</a> library to fine-tune the model using the prepared dataset. We provide the model, tokenizer, training dataset, text field name, maximum sequence length, and other configurations.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="f943" class="qg mx gu qd b bg qh qi l qj qk">from trl import SFTTrainer<br/>from transformers import TrainingArguments<br/><br/>trainer = SFTTrainer(<br/> model = model,<br/> tokenizer = tokenizer,<br/> train_dataset = dataset,<br/> dataset_text_field = &quot;text&quot;,<br/> max_seq_length = max_seq_length,<br/> dataset_num_proc = 2,<br/> packing = False, # Can make training 5x faster for short sequences.<br/> args = TrainingArguments(<br/> per_device_train_batch_size = 2,<br/> gradient_accumulation_steps = 4,<br/> warmup_steps = 5,<br/> # max_steps = 60,<br/> num_train_epochs=1,<br/> learning_rate = 2e-4, # the rate at which the model updates its parameters during training.<br/> fp16 = not torch.cuda.is_bf16_supported(),<br/> bf16 = torch.cuda.is_bf16_supported(),<br/> logging_steps = 1,<br/> optim = &quot;adamw_8bit&quot;,<br/> weight_decay = 0.01,<br/> lr_scheduler_type = &quot;linear&quot;,<br/> seed = 3407,<br/> output_dir = &quot;outputs&quot;,<br/> ),<br/>)</span></pre><p id="3f19" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">We use the <code class="cx ql qm qn qd b">TrainingArguments</code> class from Transformers to define the training parameters, including batch size, gradient accumulation steps, warmup steps, learning rate, optimizer, weight decay, and other hyperparameters.</p><h2 id="d2f9" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Starting the Training</h2><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="98c0" class="qg mx gu qd b bg qh qi l qj qk">trainer_stats = trainer.train()</span></pre><p id="5401" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">This will start the training loop, iterating over the training dataset and updating the model’s parameters based on the defined training arguments.</p><h2 id="1c10" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Inference</h2><p id="0952" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we enable native faster inference for the fine-tuned model and define a function for generating Cypher queries.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="9a0b" class="qg mx gu qd b bg qh qi l qj qk">FastLanguageModel.for_inference(model) # Enable native 2x faster inference<br/>def generate_cypher_query(question):<br/> inputs = tokenizer(<br/> [<br/> prompt.format(<br/> f&quot;Convert text to cypher query based on this schema: {graph.schema}&quot;, # instruction<br/> question, # input<br/> &quot;&quot;, # output - leave this blank for generation!<br/> )<br/> ], return_tensors = &quot;pt&quot;).to(&quot;cuda&quot;)<br/><br/> outputs = model.generate(**inputs, max_new_tokens = 64, use_cache = True)<br/> result = tokenizer.batch_decode(outputs)<br/> cypher_query = result[0].split(&quot;### Response:&quot;)[1].split(&quot;###&quot;)[0].strip().replace(&quot;&lt;|end_of_text|&gt;&quot;, &quot;&quot;).replace(&quot;&lt;eos&gt;&quot;, &quot;&quot;).replace(&quot;{{&quot;, &quot;{&quot;).replace(&quot;}}&quot;, &quot;}&quot;)<br/> return cypher_query<br/><br/>question = &quot;Write your question here ..&quot;<br/>cypher_query = generate_cypher_query(question)</span></pre><p id="9628" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">The <code class="cx ql qm qn qd b">generate_cypher_query</code> function takes a natural language question as input, formats it according to the Alpaca prompt template, and uses the fine-tuned model to generate a Cypher query. The generated query is then extracted from the model&#x27;s output and cleaned up.</p><h2 id="8a96" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Saving the Model</h2><p id="7f3e" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we save the fine-tuned model in the <a class="af os" href="https://huggingface.co/docs/hub/en/gguf" rel="noopener ugc nofollow" target="_blank">GGUF format</a>. We can choose to save the model in 8-bit quantized format (<strong class="nw gv">Q8_0</strong>), 16-bit format (f16), or other quantized formats like <strong class="nw gv">q4_k_m</strong>, depending on the desired trade-off between model size and performance.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="0761" class="qg mx gu qd b bg qh qi l qj qk"># Save to 8bit Q8_0<br/>if True: model.save_pretrained_gguf(&quot;model&quot;, tokenizer,)<br/># or Save to 16bit GGUF<br/>if False: model.save_pretrained_gguf(&quot;model&quot;, tokenizer, quantization_method = &quot;f16&quot;)<br/># or Save to q4_k_m GGUF<br/>if False: model.save_pretrained_gguf(&quot;model&quot;, tokenizer, quantization_method = &quot;q4_k_m&quot;) # you can use any other format not only &quot;q4_k_m&quot;</span></pre><h1 id="9cda" class="mw mx gu bf my mz na nb nc nd ne nf ng nh ni nj nk nl nm nn no np nq nr ns nt bk">Deploying the Model and Creating an OpenAI-Compatible API Endpoint</h1><h2 id="334d" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Installing and Creating the Model with Ollama :</h2><p id="e155" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we install <a class="af os" href="https://github.com/ollama/ollama" rel="noopener ugc nofollow" target="_blank">Ollama</a>, a tool for serving LLMs .</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="f8f3" class="qg mx gu qd b bg qh qi l qj qk">curl -fsSL https://ollama.com/install.sh | sh</span></pre><p id="3c50" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">And create a <code class="cx ql qm qn qd b">Modelfile</code> that specifies the path to the saved <strong class="nw gv">GGUF</strong> model.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="cad1" class="qg mx gu qd b bg qh qi l qj qk">nano Modelfile</span></pre><p id="035b" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">The <strong class="nw gv">Modelfile</strong> containg:</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="5b82" class="qg mx gu qd b bg qh qi l qj qk">FROM /path/to/model.gguf</span></pre><p id="0de4" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">We then use the <code class="cx ql qm qn qd b">ollama create</code> command to build the model for serving.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="f225" class="qg mx gu qd b bg qh qi l qj qk">ollama create llama3.1-cypher</span></pre><h2 id="4856" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Starting the Server :</h2><p id="2b54" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we start the Ollama server, which will make the fine-tuned model accessible via an API endpoint.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="81bb" class="qg mx gu qd b bg qh qi l qj qk">ollama serve</span></pre><h2 id="0f3f" class="ot mx gu bf my ou ov dy nc ow ox ea ng of oy oz pa oj pb pc pd on pe pf pg ph bk">Testing the API</h2><p id="f253" class="pw-post-body-paragraph nu nv gu nw b nx ny nz oa ob oc od oe of og oh oi oj ok ol om on oo op oq or gn bk">Here we demonstrate how to interact with the deployed model using the OpenAI API client. We initialize the client with the URL of the Ollama server and send a chat completion request with a natural language question.</p><pre class="pl pm pn po pp qc qd qe bp qf bb bk"><span id="43c1" class="qg mx gu qd b bg qh qi l qj qk">pip install openai</span></pre><pre class="qo qc qd qe bp qf bb bk"><span id="ff82" class="qg mx gu qd b bg qh qi l qj qk"><br/>from openai import OpenAI<br/><br/>client = OpenAI(<br/> base_url = &#x27;http://127.0.0.1:11434/v1&#x27;,<br/> api_key=&#x27;ollama&#x27;, # required, but unused<br/>)<br/><br/>response = client.chat.completions.create(<br/> model=&quot;llama3.1-cypher&quot;,<br/> messages=[<br/> {&quot;role&quot;: &quot;user&quot;, &quot;content&quot;: &quot;Write your question here .. &quot;},<br/> ]<br/>)<br/>print(response.choices[0].message.content)</span></pre><p id="2362" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">The server will use the fine-tuned model to generate a Cypher query and return it as part of the API response.</p></div></div></div><div class="ab cb qp qq qr qs" role="separator"><span class="qt by bm qu qv qw"></span><span class="qt by bm qu qv qw"></span><span class="qt by bm qu qv"></span></div><div class="gn go gp gq gr"><div class="ab cb"><div class="ci bh fz ga gb gc"><p id="8f5a" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk">With our Llama 3.1 model now fine-tuned and deployed as an OpenAI-compatible API endpoint, we possess a powerful tool for translating natural language questions into Cypher queries. This capability lays the groundwork for building a sophisticated question-answering system capable of extracting valuable insights from our graph database. In the final part of this series, we’ll explore how to integrate this fine-tuned model with a knowledge extraction component to create a comprehensive Q&amp;A system that empowers users to interact with their data using natural language.</p></div></div></div><div class="ab cb qp qq qr qs" role="separator"><span class="qt by bm qu qv qw"></span><span class="qt by bm qu qv qw"></span><span class="qt by bm qu qv"></span></div><div class="gn go gp gq gr"><div class="ab cb"><div class="ci bh fz ga gb gc"><p id="2deb" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk"><strong class="nw gv">My </strong><a class="af os" href="https://www.linkedin.com/in/ayoub-kirouane3" rel="noopener ugc nofollow" target="_blank"><strong class="nw gv">LinkedIn</strong></a> , <a class="af os" href="https://huggingface.co/ayoubkirouane" rel="noopener ugc nofollow" target="_blank"><strong class="nw gv">HuggingFace</strong></a></p><p id="e45e" class="pw-post-body-paragraph nu nv gu nw b nx pw nz oa ob px od oe of py oh oi oj pz ol om on qa op oq or gn bk"><strong class="nw gv">InfinitGraph</strong> <a class="af os" href="https://infinitgraph.ai/" rel="noopener ugc nofollow" target="_blank">website</a> , <a class="af os" href="https://www.linkedin.com/company/infinitgraph" rel="noopener ugc nofollow" target="_blank">Linkedin</a></p></div></div></div></div></section></div></div></article></div><div class="l"></div><footer class="qx qq qy qz ra ab q rb ik c"><div class="l ae"><div class="ab cb"><div class="ci bh fz ga gb gc"><div class="ab cp rc"><div class="ab q kv"><div class="rd l"><span class="l re rf rg e d"><div class="ab q kv kw"><div class="pw-multi-vote-icon fj je kx ky kz"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="footerClapButton" rel="noopener follow" href="/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fvote%2Finfinitgraph%2Fd48f58f8aefe&amp;operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;user=kirouane+Ayoub&amp;userId=4751fd7878c5&amp;source=---footer_actions--d48f58f8aefe---------------------clap_footer-----------"><div><div class="bm" aria-hidden="false"><div class="la ao lb lc ld le am lf lg lh kz"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" aria-label="clap"><path fill-rule="evenodd" d="M11.37.828 12 3.282l.63-2.454zM13.916 3.953l1.523-2.112-1.184-.39zM8.589 1.84l1.522 2.112-.337-2.501zM18.523 18.92c-.86.86-1.75 1.246-2.62 1.33a6 6 0 0 0 .407-.372c2.388-2.389 2.86-4.951 1.399-7.623l-.912-1.603-.79-1.672c-.26-.56-.194-.98.203-1.288a.7.7 0 0 1 .546-.132c.283.046.546.231.728.5l2.363 4.157c.976 1.624 1.141 4.237-1.324 6.702m-10.999-.438L3.37 14.328a.828.828 0 0 1 .585-1.408.83.83 0 0 1 .585.242l2.158 2.157a.365.365 0 0 0 .516-.516l-2.157-2.158-1.449-1.449a.826.826 0 0 1 1.167-1.17l3.438 3.44a.363.363 0 0 0 .516 0 .364.364 0 0 0 0-.516L5.293 9.513l-.97-.97a.826.826 0 0 1 0-1.166.84.84 0 0 1 1.167 0l.97.968 3.437 3.436a.36.36 0 0 0 .517 0 .366.366 0 0 0 0-.516L6.977 7.83a.82.82 0 0 1-.241-.584.82.82 0 0 1 .824-.826c.219 0 .43.087.584.242l5.787 5.787a.366.366 0 0 0 .587-.415l-1.117-2.363c-.26-.56-.194-.98.204-1.289a.7.7 0 0 1 .546-.132c.283.046.545.232.727.501l2.193 3.86c1.302 2.38.883 4.59-1.277 6.75-1.156 1.156-2.602 1.627-4.19 1.367-1.418-.236-2.866-1.033-4.079-2.246M10.75 5.971l2.12 2.12c-.41.502-.465 1.17-.128 1.89l.22.465-3.523-3.523a.8.8 0 0 1-.097-.368c0-.22.086-.428.241-.584a.847.847 0 0 1 1.167 0m7.355 1.705c-.31-.461-.746-.758-1.23-.837a1.44 1.44 0 0 0-1.11.275c-.312.24-.505.543-.59.881a1.74 1.74 0 0 0-.906-.465 1.47 1.47 0 0 0-.82.106l-2.182-2.182a1.56 1.56 0 0 0-2.2 0 1.54 1.54 0 0 0-.396.701 1.56 1.56 0 0 0-2.21-.01 1.55 1.55 0 0 0-.416.753c-.624-.624-1.649-.624-2.237-.037a1.557 1.557 0 0 0 0 2.2c-.239.1-.501.238-.715.453a1.56 1.56 0 0 0 0 2.2l.516.515a1.556 1.556 0 0 0-.753 2.615L7.01 19c1.32 1.319 2.909 2.189 4.475 2.449q.482.08.971.08c.85 0 1.653-.198 2.393-.579.231.033.46.054.686.054 1.266 0 2.457-.52 3.505-1.567 2.763-2.763 2.552-5.734 1.439-7.586z" clip-rule="evenodd"></path></svg></div></div></div></a></span></div><div class="pw-multi-vote-count l li lj lk ll lm ln lo"><p class="bf b dv z du"><span class="lp">--</span></p></div></div></span><span class="l h g f rh ri"><div class="ab q kv kw"><div class="pw-multi-vote-icon fj je kx ky kz"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="footerClapButton" rel="noopener follow" href="/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fvote%2Finfinitgraph%2Fd48f58f8aefe&amp;operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;user=kirouane+Ayoub&amp;userId=4751fd7878c5&amp;source=---footer_actions--d48f58f8aefe---------------------clap_footer-----------"><div><div class="bm" aria-hidden="false"><div class="la ao lb lc ld le am lf lg lh kz"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" aria-label="clap"><path fill-rule="evenodd" d="M11.37.828 12 3.282l.63-2.454zM13.916 3.953l1.523-2.112-1.184-.39zM8.589 1.84l1.522 2.112-.337-2.501zM18.523 18.92c-.86.86-1.75 1.246-2.62 1.33a6 6 0 0 0 .407-.372c2.388-2.389 2.86-4.951 1.399-7.623l-.912-1.603-.79-1.672c-.26-.56-.194-.98.203-1.288a.7.7 0 0 1 .546-.132c.283.046.546.231.728.5l2.363 4.157c.976 1.624 1.141 4.237-1.324 6.702m-10.999-.438L3.37 14.328a.828.828 0 0 1 .585-1.408.83.83 0 0 1 .585.242l2.158 2.157a.365.365 0 0 0 .516-.516l-2.157-2.158-1.449-1.449a.826.826 0 0 1 1.167-1.17l3.438 3.44a.363.363 0 0 0 .516 0 .364.364 0 0 0 0-.516L5.293 9.513l-.97-.97a.826.826 0 0 1 0-1.166.84.84 0 0 1 1.167 0l.97.968 3.437 3.436a.36.36 0 0 0 .517 0 .366.366 0 0 0 0-.516L6.977 7.83a.82.82 0 0 1-.241-.584.82.82 0 0 1 .824-.826c.219 0 .43.087.584.242l5.787 5.787a.366.366 0 0 0 .587-.415l-1.117-2.363c-.26-.56-.194-.98.204-1.289a.7.7 0 0 1 .546-.132c.283.046.545.232.727.501l2.193 3.86c1.302 2.38.883 4.59-1.277 6.75-1.156 1.156-2.602 1.627-4.19 1.367-1.418-.236-2.866-1.033-4.079-2.246M10.75 5.971l2.12 2.12c-.41.502-.465 1.17-.128 1.89l.22.465-3.523-3.523a.8.8 0 0 1-.097-.368c0-.22.086-.428.241-.584a.847.847 0 0 1 1.167 0m7.355 1.705c-.31-.461-.746-.758-1.23-.837a1.44 1.44 0 0 0-1.11.275c-.312.24-.505.543-.59.881a1.74 1.74 0 0 0-.906-.465 1.47 1.47 0 0 0-.82.106l-2.182-2.182a1.56 1.56 0 0 0-2.2 0 1.54 1.54 0 0 0-.396.701 1.56 1.56 0 0 0-2.21-.01 1.55 1.55 0 0 0-.416.753c-.624-.624-1.649-.624-2.237-.037a1.557 1.557 0 0 0 0 2.2c-.239.1-.501.238-.715.453a1.56 1.56 0 0 0 0 2.2l.516.515a1.556 1.556 0 0 0-.753 2.615L7.01 19c1.32 1.319 2.909 2.189 4.475 2.449q.482.08.971.08c.85 0 1.653-.198 2.393-.579.231.033.46.054.686.054 1.266 0 2.457-.52 3.505-1.567 2.763-2.763 2.552-5.734 1.439-7.586z" clip-rule="evenodd"></path></svg></div></div></div></a></span></div><div class="pw-multi-vote-count l li lj lk ll lm ln lo"><p class="bf b dv z du"><span class="lp">--</span></p></div></div></span></div><div class="bq ab"><div><div class="bm" aria-hidden="false"><button class="ao la lq lr ab q fk ls lt" aria-label="responses"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" class="lu"><path d="M18.006 16.803c1.533-1.456 2.234-3.325 2.234-5.321C20.24 7.357 16.709 4 12.191 4S4 7.357 4 11.482c0 4.126 3.674 7.482 8.191 7.482.817 0 1.622-.111 2.393-.327.231.2.48.391.744.559 1.06.693 2.203 1.044 3.399 1.044.224-.008.4-.112.486-.287a.49.49 0 0 0-.042-.518c-.495-.67-.845-1.364-1.04-2.057a4 4 0 0 1-.125-.598zm-3.122 1.055-.067-.223-.315.096a8 8 0 0 1-2.311.338c-4.023 0-7.292-2.955-7.292-6.587 0-3.633 3.269-6.588 7.292-6.588 4.014 0 7.112 2.958 7.112 6.593 0 1.794-.608 3.469-2.027 4.72l-.195.168v.255c0 .056 0 .151.016.295.025.231.081.478.154.733.154.558.398 1.117.722 1.659a5.3 5.3 0 0 1-2.165-.845c-.276-.176-.714-.383-.941-.59z"></path></svg></button></div></div></div></div><div class="ab q"><div class="qw l ix"><div><div class="bm" aria-hidden="false"><span><a class="af ag ah ai aj ak al am an ao ap aq ar as at" data-testid="footerBookmarkButton" rel="noopener follow" href="/m/signin?actionUrl=https%3A%2F%2Fmedium.com%2F_%2Fbookmark%2Fp%2Fd48f58f8aefe&amp;operation=register&amp;redirect=https%3A%2F%2Fmedium.com%2Finfinitgraph%2Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe&amp;source=---footer_actions--d48f58f8aefe---------------------bookmark_footer-----------"><svg xmlns="http://www.w3.org/2000/svg" width="25" height="25" fill="none" viewBox="0 0 25 25" class="du lw" aria-label="Add to list bookmark button"><path fill="currentColor" d="M18 2.5a.5.5 0 0 1 1 0V5h2.5a.5.5 0 0 1 0 1H19v2.5a.5.5 0 1 1-1 0V6h-2.5a.5.5 0 0 1 0-1H18zM7 7a1 1 0 0 1 1-1h3.5a.5.5 0 0 0 0-1H8a2 2 0 0 0-2 2v14a.5.5 0 0 0 .805.396L12.5 17l5.695 4.396A.5.5 0 0 0 19 21v-8.5a.5.5 0 0 0-1 0v7.485l-5.195-4.012a.5.5 0 0 0-.61 0L7 19.985z"></path></svg></a></span></div></div></div><div class="qw l ix"><div class="bm" aria-hidden="false" aria-describedby="postFooterSocialMenu" aria-labelledby="postFooterSocialMenu"><div><div class="bm" aria-hidden="false"><button aria-controls="postFooterSocialMenu" aria-expanded="false" aria-label="Share Post" data-testid="footerSocialShareButton" class="af fk ah ai aj ak al me an ao ap ex mf mg lt mh"><svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="none" viewBox="0 0 24 24"><path fill="currentColor" fill-rule="evenodd" d="M15.218 4.931a.4.4 0 0 1-.118.132l.012.006a.45.45 0 0 1-.292.074.5.5 0 0 1-.3-.13l-2.02-2.02v7.07c0 .28-.23.5-.5.5s-.5-.22-.5-.5v-7.04l-2 2a.45.45 0 0 1-.57.04h-.02a.4.4 0 0 1-.16-.3.4.4 0 0 1 .1-.32l2.8-2.8a.5.5 0 0 1 .7 0l2.8 2.79a.42.42 0 0 1 .068.498m-.106.138.008.004v-.01zM16 7.063h1.5a2 2 0 0 1 2 2v10a2 2 0 0 1-2 2h-11c-1.1 0-2-.9-2-2v-10a2 2 0 0 1 2-2H8a.5.5 0 0 1 .35.15.5.5 0 0 1 .15.35.5.5 0 0 1-.15.35.5.5 0 0 1-.35.15H6.4c-.5 0-.9.4-.9.9v10.2a.9.9 0 0 0 .9.9h11.2c.5 0 .9-.4.9-.9v-10.2c0-.5-.4-.9-.9-.9H16a.5.5 0 0 1 0-1" clip-rule="evenodd"></path></svg></button></div></div></div></div></div></div></div></div></div></footer><div class="rj rk rl rm rn l"><div class="ab cb"><div class="ci bh fz ga gb gc"><div class="ro bh r rp"></div><div class="rq l"><div class="ab rr rs rt iz iy"><div class="ru rv rw rx ry rz sa sb sc sd ab cp"><div class="h k"><a href="https://medium.com/infinitgraph?source=post_page---post_publication_info--d48f58f8aefe--------------------------------" rel="noopener follow"><div class="fj ab"><img alt="InfinitGraph" class="se ib ic cx" src="https://miro.medium.com/v2/resize:fill:96:96/1*sSrYGmX4voK-tio0-dNYRg.jpeg" width="48" height="48" loading="lazy"/><div class="se l ic ib fs n fr sf"></div></div></a></div><div class="j i d"><a href="https://medium.com/infinitgraph?source=post_page---post_publication_info--d48f58f8aefe--------------------------------" rel="noopener follow"><div class="fj ab"><img alt="InfinitGraph" class="se sh sg cx" src="https://miro.medium.com/v2/resize:fill:128:128/1*sSrYGmX4voK-tio0-dNYRg.jpeg" width="64" height="64" loading="lazy"/><div class="se l sg sh fs n fr sf"></div></div></a></div><div class="j i d si ix"><div class="ab"></div></div></div><div class="ab co sj"><div class="sk sl sm sn so l"><a class="af ag ah aj ak al am an ao ap aq ar as at ab q" href="https://medium.com/infinitgraph?source=post_page---post_publication_info--d48f58f8aefe--------------------------------" rel="noopener follow"><h2 class="pw-author-name bf sq sr ss st su sv sw of oz pa oj pc pd on pf pg bk"><span class="gn sp">Published in <!-- -->InfinitGraph</span></h2></a><div class="sx ab ia"><div class="l ix"><span class="pw-follower-count bf b bg z du"><a class="af ag ah ai aj ak al am an ao ap aq ar iq" rel="noopener follow" href="/infinitgraph/followers?source=post_page---post_publication_info--d48f58f8aefe--------------------------------">6 Followers</a></span></div><div class="bf b bg z du ab jd"><span class="ir l" aria-hidden="true"><span class="bf b bg z du">·</span></span><a class="af ag ah ai aj ak al am an ao ap aq ar iq" rel="noopener follow" href="/infinitgraph/evaluating-large-language-models-llms-a-comprehensive-approach-7e752bf1c57a?source=post_page---post_publication_info--d48f58f8aefe--------------------------------">Last published <span>Sep 26, 2024</span></a></div></div><div class="qo l"><p class="bf b bg z bk"><span class="gn">InfinitGraph mission is to pioneer the future of AI, crafting cutting edge GenAI products that redefine possibilities.</span></p></div></div></div><div class="h k"><div class="ab"></div></div></div></div><div class="ab rr rs rt iz iy"><div class="ru rv rw rx ry rz sa sb sc sd ab cp"><div class="h k"><a tabindex="0" rel="noopener follow" href="/@ayoubkirouane3?source=post_page---post_author_info--d48f58f8aefe--------------------------------"><div class="l fj"><img alt="kirouane Ayoub" class="l fd by ic ib cx" src="https://miro.medium.com/v2/resize:fill:96:96/1*T-KWhmfASlLM3XMvRKZnWA.jpeg" width="48" height="48" loading="lazy"/><div class="fr by l ic ib fs n ay sf"></div></div></a></div><div class="j i d"><a tabindex="0" rel="noopener follow" href="/@ayoubkirouane3?source=post_page---post_author_info--d48f58f8aefe--------------------------------"><div class="l fj"><img alt="kirouane Ayoub" class="l fd by sg sh cx" src="https://miro.medium.com/v2/resize:fill:128:128/1*T-KWhmfASlLM3XMvRKZnWA.jpeg" width="64" height="64" loading="lazy"/><div class="fr by l sg sh fs n ay sf"></div></div></a></div><div class="j i d si ix"><div class="ab"><span><button class="bf b bg z sy sz ta tb tc td te ev ew tf tg th fa fb fc fd bm fe ff">Follow</button></span></div></div></div><div class="ab co sj"><div class="sk sl sm sn so l"><a class="af ag ah aj ak al am an ao ap aq ar as at ab q" rel="noopener follow" href="/@ayoubkirouane3?source=post_page---post_author_info--d48f58f8aefe--------------------------------"><h2 class="pw-author-name bf sq sr ss st su sv sw of oz pa oj pc pd on pf pg bk"><span class="gn sp">Written by <!-- -->kirouane Ayoub</span></h2></a><div class="sx ab ia"><div class="l ix"><span class="pw-follower-count bf b bg z du"><a class="af ag ah ai aj ak al am an ao ap aq ar iq" rel="noopener follow" href="/@ayoubkirouane3/followers?source=post_page---post_author_info--d48f58f8aefe--------------------------------">309 Followers</a></span></div><div class="bf b bg z du ab jd"><span class="ir l" aria-hidden="true"><span class="bf b bg z du">·</span></span><a class="af ag ah ai aj ak al am an ao ap aq ar iq" rel="noopener follow" href="/@ayoubkirouane3/following?source=post_page---post_author_info--d48f58f8aefe--------------------------------">2 Following</a></div></div><div class="qo l"><p class="bf b bg z bk"><span class="gn">I Like building Machine Learning models from scratch .</span></p></div></div></div><div class="h k"><div class="ab"><span><button class="bf b bg z sy sz ta tb tc td te ev ew tf tg th fa fb fc fd bm fe ff">Follow</button></span></div></div></div></div></div></div><div class="ti l"><div class="ro bh r tj tk tl tm tn"></div><div class="ab cb"><div class="ci bh fz ga gb gc"><div class="ab q cp"><h2 class="bf sq mz nb nc nd nf ng nh nj nk nl nn no np nr ns bk">No responses yet</h2><div class="ab to"><div><div class="bm" aria-hidden="false"><a class="tp tq" href="https://policy.medium.com/medium-rules-30e5502c4eb4?source=post_page---post_responses--d48f58f8aefe--------------------------------" rel="noopener follow" target="_blank"><svg xmlns="http://www.w3.org/2000/svg" width="25" height="25" viewBox="0 0 25 25"><path fill-rule="evenodd" d="M11.987 5.036a.754.754 0 0 1 .914-.01c.972.721 1.767 1.218 2.6 1.543.828.322 1.719.485 2.887.505a.755.755 0 0 1 .741.757c-.018 3.623-.43 6.256-1.449 8.21-1.034 1.984-2.662 3.209-4.966 4.083a.75.75 0 0 1-.537-.003c-2.243-.874-3.858-2.095-4.897-4.074-1.024-1.951-1.457-4.583-1.476-8.216a.755.755 0 0 1 .741-.757c1.195-.02 2.1-.182 2.923-.503.827-.322 1.6-.815 2.519-1.535m.468.903c-.897.69-1.717 1.21-2.623 1.564-.898.35-1.856.527-3.026.565.037 3.45.469 5.817 1.36 7.515.884 1.684 2.25 2.762 4.284 3.571 2.092-.81 3.465-1.89 4.344-3.575.886-1.698 1.299-4.065 1.334-7.512-1.149-.039-2.091-.217-2.99-.567-.906-.353-1.745-.873-2.683-1.561m-.009 9.155a2.672 2.672 0 1 0 0-5.344 2.672 2.672 0 0 0 0 5.344m0 1a3.672 3.672 0 1 0 0-7.344 3.672 3.672 0 0 0 0 7.344m-1.813-3.777.525-.526.916.917 1.623-1.625.526.526-2.149 2.152z" clip-rule="evenodd"></path></svg></a></div></div></div></div></div></div></div><div class="tr ts tt tu tv l bx"><div class="h k j"><div class="ro bh tw tx"></div><div class="ab cb"><div class="ci bh fz ga gb gc"><div class="ty ab kv ja"><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="https://help.medium.com/hc/en-us?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Help</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="https://medium.statuspage.io/?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Status</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" rel="noopener follow" href="/about?autoplay=1&amp;source=post_page-----d48f58f8aefe--------------------------------"><p class="bf b dv z du">About</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" rel="noopener follow" href="/jobs-at-medium/work-at-medium-959d1a85284e?source=post_page-----d48f58f8aefe--------------------------------"><p class="bf b dv z du">Careers</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="pressinquiries@medium.com?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Press</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="https://blog.medium.com/?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Blog</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="https://policy.medium.com/medium-privacy-policy-f03bf92035c9?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Privacy</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="https://policy.medium.com/medium-terms-of-service-9db0094a1e0f?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Terms</p></a></div><div class="tz ua l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" href="https://speechify.com/medium?source=post_page-----d48f58f8aefe--------------------------------" rel="noopener follow"><p class="bf b dv z du">Text to speech</p></a></div><div class="tz l"><a class="af ag ah ai aj ak al am an ao ap aq ar as at" rel="noopener follow" href="/business?source=post_page-----d48f58f8aefe--------------------------------"><p class="bf b dv z du">Teams</p></a></div></div></div></div></div></div></div></div></div></div><script>window.__BUILD_ID__="main-20241126-181518-0cb59a020f"</script><script>window.__GRAPHQL_URI__ = "https://medium.com/_/graphql"</script><script>window.__PRELOADED_STATE__ = {"algolia":{"queries":{}},"cache":{"experimentGroupSet":true,"reason":"","group":"enabled","tags":["group-edgeCachePosts","post-d48f58f8aefe","user-4751fd7878c5","collection-12ed059500cc"],"serverVariantState":"44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a","middlewareEnabled":true,"cacheStatus":"DYNAMIC","shouldUseCache":true,"vary":[],"lohpSummerUpsellEnabled":false,"publicationHierarchyEnabledWeb":false,"postBottomResponsesEnabled":false},"client":{"hydrated":false,"isUs":false,"isNativeMedium":false,"isSafariMobile":false,"isSafari":false,"isFirefox":false,"routingEntity":{"type":"DEFAULT","explicit":false},"viewerIsBot":false},"debug":{"requestId":"cedc1128-f5af-4da4-99fd-b690a6837756","hybridDevServices":[],"originalSpanCarrier":{"traceparent":"00-dc484127ddad2a0e929999a2c2ad3e3c-b8ec84c1dfcfc8db-01"}},"multiVote":{"clapsPerPost":{}},"navigation":{"branch":{"show":null,"hasRendered":null,"blockedByCTA":false},"hideGoogleOneTap":false,"hasRenderedAlternateUserBanner":null,"currentLocation":"https:\u002F\u002Fmedium.com\u002Finfinitgraph\u002Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe","host":"medium.com","hostname":"medium.com","referrer":"","hasSetReferrer":false,"susiModal":{"step":null,"operation":"register"},"postRead":false,"partnerProgram":{"selectedCountryCode":null},"queryString":"","currentHash":""},"config":{"nodeEnv":"production","version":"main-20241126-181518-0cb59a020f","target":"production","productName":"Medium","publicUrl":"https:\u002F\u002Fcdn-client.medium.com\u002Flite","authDomain":"medium.com","authGoogleClientId":"216296035834-k1k6qe060s2tp2a2jam4ljdcms00sttg.apps.googleusercontent.com","favicon":"production","glyphUrl":"https:\u002F\u002Fglyph.medium.com","branchKey":"key_live_ofxXr2qTrrU9NqURK8ZwEhknBxiI6KBm","algolia":{"appId":"MQ57UUUQZ2","apiKeySearch":"394474ced050e3911ae2249ecc774921","indexPrefix":"medium_","host":"-dsn.algolia.net"},"recaptchaKey":"6Lfc37IUAAAAAKGGtC6rLS13R1Hrw_BqADfS1LRk","recaptcha3Key":"6Lf8R9wUAAAAABMI_85Wb8melS7Zj6ziuf99Yot5","recaptchaEnterpriseKeyId":"6Le-uGgpAAAAAPprRaokM8AKthQ9KNGdoxaGUvVp","datadog":{"applicationId":"6702d87d-a7e0-42fe-bbcb-95b469547ea0","clientToken":"pub853ea8d17ad6821d9f8f11861d23dfed","rumToken":"pubf9cc52896502b9413b68ba36fc0c7162","context":{"deployment":{"target":"production","tag":"main-20241126-181518-0cb59a020f","commit":"0cb59a020f4453d0900f671f1a6576feecc55e74"}},"datacenter":"us"},"googleAnalyticsCode":"G-7JY7T788PK","googlePay":{"apiVersion":"2","apiVersionMinor":"0","merchantId":"BCR2DN6TV7EMTGBM","merchantName":"Medium","instanceMerchantId":"13685562959212738550"},"applePay":{"version":3},"signInWallCustomDomainCollectionIds":["3a8144eabfe3","336d898217ee","61061eb0c96b","138adf9c44c","819cc2aaeee0"],"mediumMastodonDomainName":"me.dm","mediumOwnedAndOperatedCollectionIds":["8a9336e5bb4","b7e45b22fec3","193b68bd4fba","8d6b8a439e32","54c98c43354d","3f6ecf56618","d944778ce714","92d2092dc598","ae2a65f35510","1285ba81cada","544c7006046e","fc8964313712","40187e704f1c","88d9857e584e","7b6769f2748b","bcc38c8f6edf","cef6983b292","cb8577c9149e","444d13b52878","713d7dbc99b0","ef8e90590e66","191186aaafa0","55760f21cdc5","9dc80918cc93","bdc4052bbdba","8ccfed20cbb2"],"tierOneDomains":["medium.com","thebolditalic.com","arcdigital.media","towardsdatascience.com","uxdesign.cc","codeburst.io","psiloveyou.xyz","writingcooperative.com","entrepreneurshandbook.co","prototypr.io","betterhumans.coach.me","theascent.pub"],"topicsToFollow":["d61cf867d93f","8a146bc21b28","1eca0103fff3","4d562ee63426","aef1078a3ef5","e15e46793f8d","6158eb913466","55f1c20aba7a","3d18b94f6858","4861fee224fd","63c6f1f93ee","1d98b3a9a871","decb52b64abf","ae5d4995e225","830cded25262"],"topicToTagMappings":{"accessibility":"accessibility","addiction":"addiction","android-development":"android-development","art":"art","artificial-intelligence":"artificial-intelligence","astrology":"astrology","basic-income":"basic-income","beauty":"beauty","biotech":"biotech","blockchain":"blockchain","books":"books","business":"business","cannabis":"cannabis","cities":"cities","climate-change":"climate-change","comics":"comics","coronavirus":"coronavirus","creativity":"creativity","cryptocurrency":"cryptocurrency","culture":"culture","cybersecurity":"cybersecurity","data-science":"data-science","design":"design","digital-life":"digital-life","disability":"disability","economy":"economy","education":"education","equality":"equality","family":"family","feminism":"feminism","fiction":"fiction","film":"film","fitness":"fitness","food":"food","freelancing":"freelancing","future":"future","gadgets":"gadgets","gaming":"gaming","gun-control":"gun-control","health":"health","history":"history","humor":"humor","immigration":"immigration","ios-development":"ios-development","javascript":"javascript","justice":"justice","language":"language","leadership":"leadership","lgbtqia":"lgbtqia","lifestyle":"lifestyle","machine-learning":"machine-learning","makers":"makers","marketing":"marketing","math":"math","media":"media","mental-health":"mental-health","mindfulness":"mindfulness","money":"money","music":"music","neuroscience":"neuroscience","nonfiction":"nonfiction","outdoors":"outdoors","parenting":"parenting","pets":"pets","philosophy":"philosophy","photography":"photography","podcasts":"podcast","poetry":"poetry","politics":"politics","privacy":"privacy","product-management":"product-management","productivity":"productivity","programming":"programming","psychedelics":"psychedelics","psychology":"psychology","race":"race","relationships":"relationships","religion":"religion","remote-work":"remote-work","san-francisco":"san-francisco","science":"science","self":"self","self-driving-cars":"self-driving-cars","sexuality":"sexuality","social-media":"social-media","society":"society","software-engineering":"software-engineering","space":"space","spirituality":"spirituality","sports":"sports","startups":"startup","style":"style","technology":"technology","transportation":"transportation","travel":"travel","true-crime":"true-crime","tv":"tv","ux":"ux","venture-capital":"venture-capital","visual-design":"visual-design","work":"work","world":"world","writing":"writing"},"defaultImages":{"avatar":{"imageId":"1*dmbNkD5D-u45r44go_cf0g.png","height":150,"width":150},"orgLogo":{"imageId":"7*V1_7XP4snlmqrc_0Njontw.png","height":110,"width":500},"postLogo":{"imageId":"bd978bb536350a710e8efb012513429cabdc4c28700604261aeda246d0f980b7","height":810,"width":1440},"postPreviewImage":{"imageId":"1*hn4v1tCaJy7cWMyb0bpNpQ.png","height":386,"width":579}},"collectionStructuredData":{"8d6b8a439e32":{"name":"Elemental","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fcdn-images-1.medium.com\u002Fmax\u002F980\u002F1*9ygdqoKprhwuTVKUM0DLPA@2x.png","width":980,"height":159}}},"3f6ecf56618":{"name":"Forge","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fcdn-images-1.medium.com\u002Fmax\u002F596\u002F1*uULpIlImcO5TDuBZ6lm7Lg@2x.png","width":596,"height":183}}},"ae2a65f35510":{"name":"GEN","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F264\u002F1*RdVZMdvfV3YiZTw6mX7yWA.png","width":264,"height":140}}},"88d9857e584e":{"name":"LEVEL","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F540\u002F1*JqYMhNX6KNNb2UlqGqO2WQ.png","width":540,"height":108}}},"7b6769f2748b":{"name":"Marker","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fcdn-images-1.medium.com\u002Fmax\u002F383\u002F1*haCUs0wF6TgOOvfoY-jEoQ@2x.png","width":383,"height":92}}},"444d13b52878":{"name":"OneZero","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F540\u002F1*cw32fIqCbRWzwJaoQw6BUg.png","width":540,"height":123}}},"8ccfed20cbb2":{"name":"Zora","data":{"@type":"NewsMediaOrganization","ethicsPolicy":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Farticles\u002F360043290473","logo":{"@type":"ImageObject","url":"https:\u002F\u002Fmiro.medium.com\u002Fmax\u002F540\u002F1*tZUQqRcCCZDXjjiZ4bDvgQ.png","width":540,"height":106}}}},"embeddedPostIds":{"coronavirus":"cd3010f9d81f"},"sharedCdcMessaging":{"COVID_APPLICABLE_TAG_SLUGS":[],"COVID_APPLICABLE_TOPIC_NAMES":[],"COVID_APPLICABLE_TOPIC_NAMES_FOR_TOPIC_PAGE":[],"COVID_MESSAGES":{"tierA":{"text":"For more information on the novel coronavirus and Covid-19, visit cdc.gov.","markups":[{"start":66,"end":73,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]},"tierB":{"text":"Anyone can publish on Medium per our Policies, but we don’t fact-check every story. For more info about the coronavirus, see cdc.gov.","markups":[{"start":37,"end":45,"href":"https:\u002F\u002Fhelp.medium.com\u002Fhc\u002Fen-us\u002Fcategories\u002F201931128-Policies-Safety"},{"start":125,"end":132,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]},"paywall":{"text":"This article has been made free for everyone, thanks to Medium Members. For more information on the novel coronavirus and Covid-19, visit cdc.gov.","markups":[{"start":56,"end":70,"href":"https:\u002F\u002Fmedium.com\u002Fmembership"},{"start":138,"end":145,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]},"unbound":{"text":"This article is free for everyone, thanks to Medium Members. For more information on the novel coronavirus and Covid-19, visit cdc.gov.","markups":[{"start":45,"end":59,"href":"https:\u002F\u002Fmedium.com\u002Fmembership"},{"start":127,"end":134,"href":"https:\u002F\u002Fwww.cdc.gov\u002Fcoronavirus\u002F2019-nCoV"}]}},"COVID_BANNER_POST_ID_OVERRIDE_WHITELIST":["3b31a67bff4a"]},"sharedVoteMessaging":{"TAGS":["politics","election-2020","government","us-politics","election","2020-presidential-race","trump","donald-trump","democrats","republicans","congress","republican-party","democratic-party","biden","joe-biden","maga"],"TOPICS":["politics","election"],"MESSAGE":{"text":"Find out more about the U.S. election results here.","markups":[{"start":46,"end":50,"href":"https:\u002F\u002Fcookpolitical.com\u002F2020-national-popular-vote-tracker"}]},"EXCLUDE_POSTS":["397ef29e3ca5"]},"embedPostRules":[],"recircOptions":{"v1":{"limit":3},"v2":{"limit":8}},"braintreeClientKey":"production_zjkj96jm_m56f8fqpf7ngnrd4","braintree":{"enabled":true,"merchantId":"m56f8fqpf7ngnrd4","merchantAccountId":{"usd":"AMediumCorporation_instant","eur":"amediumcorporation_EUR","cad":"amediumcorporation_CAD"},"publicKey":"ds2nn34bg2z7j5gd","braintreeEnvironment":"production","dashboardUrl":"https:\u002F\u002Fwww.braintreegateway.com\u002Fmerchants","gracePeriodDurationInDays":14,"mediumMembershipPlanId":{"monthly":"ce105f8c57a3","monthlyV2":"e8a5e126-792b-4ee6-8fba-d574c1b02fc5","monthlyWithTrial":"d5ee3dbe3db8","monthlyPremium":"fa741a9b47a2","yearly":"a40ad4a43185","yearlyV2":"3815d7d6-b8ca-4224-9b8c-182f9047866e","yearlyStaff":"d74fb811198a","yearlyWithTrial":"b3bc7350e5c7","yearlyPremium":"e21bd2c12166","monthlyOneYearFree":"e6c0637a-2bad-4171-ab4f-3c268633d83c","monthly25PercentOffFirstYear":"235ecc62-0cdb-49ae-9378-726cd21c504b","monthly20PercentOffFirstYear":"ba518864-9c13-4a99-91ca-411bf0cac756","monthly15PercentOffFirstYear":"594c029b-9f89-43d5-88f8-8173af4e070e","monthly10PercentOffFirstYear":"c6c7bc9a-40f2-4b51-8126-e28511d5bdb0","monthlyForStudents":"629ebe51-da7d-41fd-8293-34cd2f2030a8","yearlyOneYearFree":"78ba7be9-0d9f-4ece-aa3e-b54b826f2bf1","yearly25PercentOffFirstYear":"2dbb010d-bb8f-4eeb-ad5c-a08509f42d34","yearly20PercentOffFirstYear":"47565488-435b-47f8-bf93-40d5fbe0ebc8","yearly15PercentOffFirstYear":"8259809b-0881-47d9-acf7-6c001c7f720f","yearly10PercentOffFirstYear":"9dd694fb-96e1-472c-8d9e-3c868d5c1506","yearlyForStudents":"e29345ef-ab1c-4234-95c5-70e50fe6bc23","monthlyCad":"p52orjkaceei","yearlyCad":"h4q9g2up9ktt"},"braintreeDiscountId":{"oneMonthFree":"MONTHS_FREE_01","threeMonthsFree":"MONTHS_FREE_03","sixMonthsFree":"MONTHS_FREE_06","fiftyPercentOffOneYear":"FIFTY_PERCENT_OFF_ONE_YEAR"},"3DSecureVersion":"2","defaultCurrency":"usd","providerPlanIdCurrency":{"4ycw":"usd","rz3b":"usd","3kqm":"usd","jzw6":"usd","c2q2":"usd","nnsw":"usd","q8qw":"usd","d9y6":"usd","fx7w":"cad","nwf2":"cad"}},"paypalClientId":"AXj1G4fotC2GE8KzWX9mSxCH1wmPE3nJglf4Z2ig_amnhvlMVX87otaq58niAg9iuLktVNF_1WCMnN7v","paypal":{"host":"https:\u002F\u002Fapi.paypal.com:443","clientMode":"production","serverMode":"live","webhookId":"4G466076A0294510S","monthlyPlan":{"planId":"P-9WR0658853113943TMU5FDQA","name":"Medium Membership (Monthly) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"yearlyPlan":{"planId":"P-7N8963881P8875835MU5JOPQ","name":"Medium Membership (Annual) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"oneYearGift":{"name":"Medium Membership (1 Year, Digital Gift Code)","description":"Unlimited access to the best and brightest stories on Medium. Gift codes can be redeemed at medium.com\u002Fredeem.","price":"50.00","currency":"USD","sku":"membership-gift-1-yr"},"oldMonthlyPlan":{"planId":"P-96U02458LM656772MJZUVH2Y","name":"Medium Membership (Monthly)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"oldYearlyPlan":{"planId":"P-59P80963JF186412JJZU3SMI","name":"Medium Membership (Annual)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"monthlyPlanWithTrial":{"planId":"P-66C21969LR178604GJPVKUKY","name":"Medium Membership (Monthly) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"yearlyPlanWithTrial":{"planId":"P-6XW32684EX226940VKCT2MFA","name":"Medium Membership (Annual) with setup fee","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"oldMonthlyPlanNoSetupFee":{"planId":"P-4N046520HR188054PCJC7LJI","name":"Medium Membership (Monthly)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed monthly."},"oldYearlyPlanNoSetupFee":{"planId":"P-7A4913502Y5181304CJEJMXQ","name":"Medium Membership (Annual)","description":"Unlimited access to the best and brightest stories on Medium. Membership billed annually."},"sdkUrl":"https:\u002F\u002Fwww.paypal.com\u002Fsdk\u002Fjs"},"stripePublishableKey":"pk_live_7FReX44VnNIInZwrIIx6ghjl","log":{"json":true,"level":"info"},"imageUploadMaxSizeMb":25,"staffPicks":{"title":"Staff Picks","catalogId":"c7bc6e1ee00f"}},"session":{"xsrf":""}}</script><script>window.__APOLLO_STATE__ = {"ROOT_QUERY":{"__typename":"Query","collectionByDomainOrSlug({\"domainOrSlug\":\"infinitgraph\"})":{"__ref":"Collection:12ed059500cc"},"viewer":null,"postResult({\"id\":\"d48f58f8aefe\"})":{"__ref":"Post:d48f58f8aefe"}},"ImageMetadata:":{"__typename":"ImageMetadata","id":""},"Collection:12ed059500cc":{"__typename":"Collection","id":"12ed059500cc","favicon":{"__ref":"ImageMetadata:"},"customStyleSheet":null,"colorPalette":{"__typename":"ColorPalette","highlightSpectrum":{"__typename":"ColorSpectrum","backgroundColor":"#FFFFFFFF","colorPoints":[{"__typename":"ColorPoint","color":"#FFFFFFFF","point":0},{"__typename":"ColorPoint","color":"#FFE8F3E8","point":0.1},{"__typename":"ColorPoint","color":"#FFE8F3E8","point":0.2},{"__typename":"ColorPoint","color":"#FFD1E7D1","point":0.6},{"__typename":"ColorPoint","color":"#FFA3D0A2","point":1}]},"defaultBackgroundSpectrum":{"__typename":"ColorSpectrum","backgroundColor":"#FFFFFFFF","colorPoints":[{"__typename":"ColorPoint","color":"#FF1A8917","point":0},{"__typename":"ColorPoint","color":"#FF11800E","point":0.1},{"__typename":"ColorPoint","color":"#FF0F730C","point":0.2},{"__typename":"ColorPoint","color":"#FF095407","point":1}]},"tintBackgroundSpectrum":null},"domain":null,"slug":"infinitgraph","googleAnalyticsId":null,"editors":[{"__typename":"CollectionMastheadUserItem","user":{"__ref":"User:ed50638a776f"}}],"name":"InfinitGraph","avatar":{"__ref":"ImageMetadata:1*sSrYGmX4voK-tio0-dNYRg.jpeg"},"description":"InfinitGraph mission is to pioneer the future of AI, crafting cutting edge GenAI products that redefine possibilities.","subscriberCount":6,"latestPostsConnection({\"paging\":{\"limit\":1}})":{"__typename":"PostConnection","posts":[{"__ref":"Post:7e752bf1c57a"}]},"viewerEdge":{"__ref":"CollectionViewerEdge:collectionId:12ed059500cc-viewerId:lo_409b16ad0044"},"twitterUsername":null,"facebookPageId":null,"logo":{"__ref":"ImageMetadata:1*sSrYGmX4voK-tio0-dNYRg.jpeg"}},"User:ed50638a776f":{"__typename":"User","id":"ed50638a776f"},"ImageMetadata:1*sSrYGmX4voK-tio0-dNYRg.jpeg":{"__typename":"ImageMetadata","id":"1*sSrYGmX4voK-tio0-dNYRg.jpeg","originalWidth":143,"originalHeight":91},"User:49613fd82e4a":{"__typename":"User","id":"49613fd82e4a","customDomainState":null,"hasSubdomain":false,"username":"jf_rezkellah"},"Post:7e752bf1c57a":{"__typename":"Post","id":"7e752bf1c57a","firstPublishedAt":1727354441443,"creator":{"__ref":"User:49613fd82e4a"},"collection":{"__ref":"Collection:12ed059500cc"},"isSeries":false,"mediumUrl":"https:\u002F\u002Fmedium.com\u002Finfinitgraph\u002Fevaluating-large-language-models-llms-a-comprehensive-approach-7e752bf1c57a","sequence":null,"uniqueSlug":"evaluating-large-language-models-llms-a-comprehensive-approach-7e752bf1c57a"},"LinkedAccounts:4751fd7878c5":{"__typename":"LinkedAccounts","mastodon":null,"id":"4751fd7878c5"},"UserViewerEdge:userId:4751fd7878c5-viewerId:lo_409b16ad0044":{"__typename":"UserViewerEdge","id":"userId:4751fd7878c5-viewerId:lo_409b16ad0044","isFollowing":false,"isUser":false,"isMuting":false},"NewsletterV3:3c36e1d68dc9":{"__typename":"NewsletterV3","id":"3c36e1d68dc9","type":"NEWSLETTER_TYPE_AUTHOR","slug":"4751fd7878c5","name":"4751fd7878c5","collection":null,"user":{"__ref":"User:4751fd7878c5"}},"User:4751fd7878c5":{"__typename":"User","id":"4751fd7878c5","name":"kirouane Ayoub","username":"ayoubkirouane3","newsletterV3":{"__ref":"NewsletterV3:3c36e1d68dc9"},"linkedAccounts":{"__ref":"LinkedAccounts:4751fd7878c5"},"isSuspended":false,"imageId":"1*T-KWhmfASlLM3XMvRKZnWA.jpeg","mediumMemberAt":0,"verifications":{"__typename":"VerifiedInfo","isBookAuthor":false},"socialStats":{"__typename":"SocialStats","followerCount":309,"followingCount":0,"collectionFollowingCount":2},"customDomainState":null,"hasSubdomain":false,"bio":"I Like building Machine Learning models from scratch .","isPartnerProgramEnrolled":false,"viewerEdge":{"__ref":"UserViewerEdge:userId:4751fd7878c5-viewerId:lo_409b16ad0044"},"viewerIsUser":false,"postSubscribeMembershipUpsellShownAt":0,"membership":null,"allowNotes":true,"twitterScreenName":""},"Paragraph:a96d61ac0572_0":{"__typename":"Paragraph","id":"a96d61ac0572_0","name":"e40b","type":"H3","href":null,"layout":null,"metadata":null,"text":"Building a robust GraphRAG System for a specific use case -Part Two-","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_1":{"__typename":"Paragraph","id":"a96d61ac0572_1","name":"4ffc","type":"H3","href":null,"layout":null,"metadata":null,"text":"Part Two: The Fine-Tuning Process","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_2":{"__typename":"Paragraph","id":"a96d61ac0572_2","name":"4960","type":"P","href":null,"layout":null,"metadata":null,"text":"In the first part of this series, we embarked on the crucial task of preparing a custom dataset for fine-tuning a large language model (LLM) for text-to-Cypher translation. We generated a diverse set of questions tailored to our specific Neo4j graph database schema and then leveraged another LLM to translate these questions into their corresponding Cypher queries. This process resulted in a high-quality dataset of question-Cypher pairs, which will serve as the foundation for fine-tuning our target LLM in this part.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":7,"end":17,"href":"https:\u002F\u002Fmedium.com\u002Finfinitgraph\u002Fbuilding-a-robust-graphrag-system-for-specific-use-case-part-one-0db34b6eea75","anchorType":"LINK","userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_3":{"__typename":"Paragraph","id":"a96d61ac0572_3","name":"4255","type":"H4","href":null,"layout":null,"metadata":null,"text":"Fine-Tuning Llama 3.1 with QLoRA","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_4":{"__typename":"Paragraph","id":"a96d61ac0572_4","name":"589b","type":"P","href":null,"layout":null,"metadata":null,"text":"Our objective in this part is to fine-tune a Llama 3.1 8b model using the dataset generated in the previous step. To achieve this efficiently, we will leverage one of the Parameter-Efficient Fine-Tuning (PEFT) methods called QLoRA (Quantized Low-Rank Adaptation), implemented using the Unsloth framework.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":45,"end":57,"href":"https:\u002F\u002Fhuggingface.co\u002Fmeta-llama\u002FMeta-Llama-3.1-8B-Instruct","anchorType":"LINK","userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_5":{"__typename":"Paragraph","id":"a96d61ac0572_5","name":"771f","type":"H4","href":null,"layout":null,"metadata":null,"text":"PEFT (Parameter-Efficient Fine-Tuning)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"ImageMetadata:0*P6kJ1FqVWiSWEnRd.png":{"__typename":"ImageMetadata","id":"0*P6kJ1FqVWiSWEnRd.png","originalHeight":487,"originalWidth":1050,"focusPercentX":null,"focusPercentY":null,"alt":null},"Paragraph:a96d61ac0572_6":{"__typename":"Paragraph","id":"a96d61ac0572_6","name":"27f4","type":"IMG","href":null,"layout":"INSET_CENTER","metadata":{"__ref":"ImageMetadata:0*P6kJ1FqVWiSWEnRd.png"},"text":"","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_7":{"__typename":"Paragraph","id":"a96d61ac0572_7","name":"7498","type":"P","href":null,"layout":null,"metadata":null,"text":"Parameter-Efficient Fine-Tuning (PEFT) techniques address the challenge of fine-tuning large language models, which can be computationally expensive and require significant memory resources. Instead of updating all the parameters of a pre-trained LLM, PEFT methods modify only a small subset of parameters, typically those that have the most significant impact on the target task. This approach drastically reduces the computational burden and memory footprint of the fine-tuning process, making it feasible to fine-tune large LLMs even on resource-constrained hardware.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":0,"end":38,"href":"https:\u002F\u002Fwww.leewayhertz.com\u002Fparameter-efficient-fine-tuning","anchorType":"LINK","userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_8":{"__typename":"Paragraph","id":"a96d61ac0572_8","name":"22e0","type":"H4","href":null,"layout":null,"metadata":null,"text":"QLoRA (Quantized Low-Rank Adaptation)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"ImageMetadata:0*lsSpKinoKZ-QwRes.png":{"__typename":"ImageMetadata","id":"0*lsSpKinoKZ-QwRes.png","originalHeight":546,"originalWidth":1050,"focusPercentX":null,"focusPercentY":null,"alt":null},"Paragraph:a96d61ac0572_9":{"__typename":"Paragraph","id":"a96d61ac0572_9","name":"6afd","type":"IMG","href":null,"layout":"INSET_CENTER","metadata":{"__ref":"ImageMetadata:0*lsSpKinoKZ-QwRes.png"},"text":"","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_10":{"__typename":"Paragraph","id":"a96d61ac0572_10","name":"4b66","type":"P","href":null,"layout":null,"metadata":null,"text":"QLoRA is a particularly efficient PEFT technique that combines the benefits of Low-Rank Adaptation (LoRA) with quantization. LoRA fine-tunes models by adding small, low-rank matrices to the existing layers, effectively injecting task-specific knowledge without modifying the original model’s weights. QLoRA further enhances this approach by applying 4-bit quantization to the pre-trained model’s weights, drastically reducing memory consumption while maintaining performance comparable to full 16-bit fine-tuning. This combination of techniques allows for fine-tuning large LLMs, such as Llama 3.1, on relatively modest hardware, even a single GPU with limited memory.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":0,"end":5,"href":"https:\u002F\u002Farxiv.org\u002Fabs\u002F2305.14314","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"A","start":79,"end":105,"href":"https:\u002F\u002Farxiv.org\u002Fabs\u002F2106.09685","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":350,"end":368,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":588,"end":597,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_11":{"__typename":"Paragraph","id":"a96d61ac0572_11","name":"6ba0","type":"H4","href":null,"layout":null,"metadata":null,"text":"Unsloth Framework","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"ImageMetadata:0*Sa48aUfZwjgKAJda.png":{"__typename":"ImageMetadata","id":"0*Sa48aUfZwjgKAJda.png","originalHeight":315,"originalWidth":898,"focusPercentX":null,"focusPercentY":null,"alt":null},"Paragraph:a96d61ac0572_12":{"__typename":"Paragraph","id":"a96d61ac0572_12","name":"1617","type":"IMG","href":null,"layout":"INSET_CENTER","metadata":{"__ref":"ImageMetadata:0*Sa48aUfZwjgKAJda.png"},"text":"","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_13":{"__typename":"Paragraph","id":"a96d61ac0572_13","name":"351e","type":"P","href":null,"layout":null,"metadata":null,"text":"The Unsloth framework is an open-source solution designed to streamline and simplify the fine-tuning and training of LLMs like Llama 3, Mistral, and Gemma. Developed by a team with experience at NVIDIA, Unsloth focuses on making the fine-tuning process faster, more efficient, and less resource-intensive. It achieves this by incorporating advanced techniques like LoRA and quantization, providing a user-friendly interface, and offering seamless integration with popular tools like Google Colab.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":4,"end":11,"href":"https:\u002F\u002Fdocs.unsloth.ai\u002F","anchorType":"LINK","userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_14":{"__typename":"Paragraph","id":"a96d61ac0572_14","name":"6e1e","type":"P","href":null,"layout":null,"metadata":null,"text":"Unsloth’s primary goal is to democratize the creation of custom AI models, enabling developers to efficiently build and deploy models tailored to specific needs, regardless of their computational resources. By utilizing Unsloth, we can leverage the power of QLoRA to fine-tune our Llama 3.1 model for text-to-Cypher translation effectively and efficiently.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_15":{"__typename":"Paragraph","id":"a96d61ac0572_15","name":"5d0a","type":"H4","href":null,"layout":null,"metadata":null,"text":"Installing Dependencies","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_16":{"__typename":"Paragraph","id":"a96d61ac0572_16","name":"a08d","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we install the necessary packages for fine-tuning.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_17":{"__typename":"Paragraph","id":"a96d61ac0572_17","name":"2410","type":"PRE","href":null,"layout":null,"metadata":null,"text":"# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n!pip install \"unsloth[colab-new] @ git+https:\u002F\u002Fgithub.com\u002Funslothai\u002Funsloth.git\"\n\n# We have to check which Torch version for Xformers (2.3 -\u003E 0.0.27)\nfrom torch import __version__; from packaging.version import Version as V\nxformers = \"xformers==0.0.27\" if V(__version__) \u003C V(\"2.4.0\") else \"xformers\"\n!pip install --no-deps {xformers} trl peft accelerate bitsandbytes triton","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_18":{"__typename":"Paragraph","id":"a96d61ac0572_18","name":"d4d0","type":"H4","href":null,"layout":null,"metadata":null,"text":"Loading the Model","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_19":{"__typename":"Paragraph","id":"a96d61ac0572_19","name":"4948","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we load the pre-trained Llama 3.1 8B model and its tokenizer using the FastLanguageModel class from Unsloth. We specify the maximum sequence length (max_seq_length) to restrict the model's context window, which impacts both compute and memory usage.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":76,"end":93,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":154,"end":168,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_20":{"__typename":"Paragraph","id":"a96d61ac0572_20","name":"285e","type":"PRE","href":null,"layout":null,"metadata":null,"text":"from unsloth import FastLanguageModel\nimport torch\nmax_seq_length = 2048 # to restricts its context window, most of model support larger, but make sure to use it based on your need since it will consumes more compute and VRAM\ndtype = None # None for auto detection. Float16 for Tesla T4, V100, Bfloat16 for Ampere+\nload_in_4bit = True # Use 4bit quantization to reduce memory usage. Can be False.\n\nmodel, tokenizer = FastLanguageModel.from_pretrained(\n model_name = \"unsloth\u002FMeta-Llama-3.1-8B\",\n max_seq_length = max_seq_length,\n dtype = dtype,\n load_in_4bit = load_in_4bit,\n)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_21":{"__typename":"Paragraph","id":"a96d61ac0572_21","name":"a75d","type":"P","href":null,"layout":null,"metadata":null,"text":"We set the data type (dtype) to None for automatic detection, or we can explicitly choose float16 for older GPUs or bfloat16 for newer Ampere GPUs. We enable 4-bit quantization (load_in_4bit) to reduce memory usage during fine-tuning.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":22,"end":27,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":32,"end":36,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":90,"end":97,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":116,"end":124,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":178,"end":190,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_22":{"__typename":"Paragraph","id":"a96d61ac0572_22","name":"c0fc","type":"H4","href":null,"layout":null,"metadata":null,"text":"Configuring the Model for Training","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_23":{"__typename":"Paragraph","id":"a96d61ac0572_23","name":"19ea","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we prepare the loaded model for PEFT using QLoRA. We use the get_peft_model function to apply LoRA to specific layers of the model. We define the rank (r) of the LoRA matrices, which determines the number of trainable parameters. Higher ranks can store more information but increase computational and memory costs.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":66,"end":80,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":157,"end":158,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_24":{"__typename":"Paragraph","id":"a96d61ac0572_24","name":"399f","type":"PRE","href":null,"layout":null,"metadata":null,"text":"model = FastLanguageModel.get_peft_model(\n model,\n r = 16, # Suggested 8, 16, 32, 64, 128, higher ranks can store more information but increase the computational and memory cost of LoRA.\n target_modules = [\"q_proj\", \"k_proj\", \"v_proj\", \"o_proj\",\n \"gate_proj\", \"up_proj\", \"down_proj\",],\n lora_alpha = 16, # rule of thumb, double the r or equal. alpha is a scaling factor for updates\n lora_dropout = 0, # Supports any, but = 0 is optimized. Probability of zeroing out elements in low-rank matrices for regularization.\n random_state = 3407,\n use_rslora = True, # has been proven to work better ( https:\u002F\u002Farxiv.org\u002Fpdf\u002F2312.03732 )\n)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"graphql"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_25":{"__typename":"Paragraph","id":"a96d61ac0572_25","name":"daf4","type":"P","href":null,"layout":null,"metadata":null,"text":"We specify the target_modules, which are the layers where LoRA will be applied. We set lora_alpha (scaling factor for updates), lora_dropout (probability of dropout for regularization), and random_state for reproducibility. We also enable use_rslora, which has been shown to improve performance.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":15,"end":29,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":87,"end":97,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":128,"end":140,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":190,"end":202,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":239,"end":249,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_26":{"__typename":"Paragraph","id":"a96d61ac0572_26","name":"0bdd","type":"H4","href":null,"layout":null,"metadata":null,"text":"Instruction Tuning Prompt Formatter","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_27":{"__typename":"Paragraph","id":"a96d61ac0572_27","name":"ee75","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we define the prompt format and a function for preparing training data. We use the Alpaca format, which consists of an instruction, input, and response. We customize the instruction to guide the model to convert text to Cypher queries based on the provided graph schema.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_28":{"__typename":"Paragraph","id":"a96d61ac0572_28","name":"ab2d","type":"PRE","href":null,"layout":null,"metadata":null,"text":"prompt = \"\"\"Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\n{}\n\n### Input:\n{}\n\n### Response:\n{}\"\"\"\n\nEOS_TOKEN = tokenizer.eos_token # Must add EOS_TOKEN\ndef formatting_prompts_func(examples):\n instructions = f\"Convert text to cypher query based on this schema: {graph.schema}\" # examples[\"instructions\"]\n inputs = examples[\"input\"]\n outputs = examples[\"output\"]\n texts = []\n for input, output in zip(inputs, outputs):\n # Must add EOS_TOKEN, otherwise your generation will go on forever!\n text = prompt.format(instructions, input, output) + EOS_TOKEN\n texts.append(text)\n return { \"text\" : texts, }\npass","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_29":{"__typename":"Paragraph","id":"a96d61ac0572_29","name":"74d7","type":"P","href":null,"layout":null,"metadata":null,"text":"The formatting_prompts_func takes a dataset of examples and formats them according to the Alpaca prompt template, adding the end-of-sequence token (EOS_TOKEN) to ensure proper termination of the generated sequences.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":4,"end":27,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":148,"end":157,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_30":{"__typename":"Paragraph","id":"a96d61ac0572_30","name":"1705","type":"H4","href":null,"layout":null,"metadata":null,"text":"Loading and Preparing the Dataset","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_31":{"__typename":"Paragraph","id":"a96d61ac0572_31","name":"24c4","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we load the dataset generated in the first part, filter out rows with syntax errors or timeouts, and rename the columns to match the expected format for the fine-tuning process.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_32":{"__typename":"Paragraph","id":"a96d61ac0572_32","name":"5226","type":"PRE","href":null,"layout":null,"metadata":null,"text":"import pandas as pd\n\ndf = pd.read_csv('final_text2cypher.csv')\ndf = df[(df['syntax_error'] == False) & (df['timeout'] == False)]\ndf = df[['question','cypher']]\ndf.rename(columns={'question': 'input','cypher':'output'}, inplace=True)\ndf.reset_index(drop=True, inplace=True)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"bash"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_33":{"__typename":"Paragraph","id":"a96d61ac0572_33","name":"6ac5","type":"P","href":null,"layout":null,"metadata":null,"text":"We then convert the Pandas DataFrame into a Hugging Face Dataset object and apply the formatting_prompts_func to format the examples according to the Alpaca prompt template.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":57,"end":64,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"CODE","start":86,"end":109,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_34":{"__typename":"Paragraph","id":"a96d61ac0572_34","name":"045e","type":"PRE","href":null,"layout":null,"metadata":null,"text":"from datasets import Dataset\ndataset = Dataset.from_pandas(df)\ndataset = dataset.map(formatting_prompts_func, batched = True)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_35":{"__typename":"Paragraph","id":"a96d61ac0572_35","name":"56d9","type":"H4","href":null,"layout":null,"metadata":null,"text":"Creating the Supervised Fine-Tuning Trainer","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_36":{"__typename":"Paragraph","id":"a96d61ac0572_36","name":"3b1d","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we create the SFTTrainer from the TRL library to fine-tune the model using the prepared dataset. We provide the model, tokenizer, training dataset, text field name, maximum sequence length, and other configurations.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":19,"end":29,"href":"https:\u002F\u002Fhuggingface.co\u002Fdocs\u002Ftrl\u002Fen\u002Fsft_trainer","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"A","start":39,"end":42,"href":"https:\u002F\u002Fhuggingface.co\u002Fdocs\u002Ftrl\u002Fen\u002Findex","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":19,"end":29,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_37":{"__typename":"Paragraph","id":"a96d61ac0572_37","name":"f943","type":"PRE","href":null,"layout":null,"metadata":null,"text":"from trl import SFTTrainer\nfrom transformers import TrainingArguments\n\ntrainer = SFTTrainer(\n model = model,\n tokenizer = tokenizer,\n train_dataset = dataset,\n dataset_text_field = \"text\",\n max_seq_length = max_seq_length,\n dataset_num_proc = 2,\n packing = False, # Can make training 5x faster for short sequences.\n args = TrainingArguments(\n per_device_train_batch_size = 2,\n gradient_accumulation_steps = 4,\n warmup_steps = 5,\n # max_steps = 60,\n num_train_epochs=1,\n learning_rate = 2e-4, # the rate at which the model updates its parameters during training.\n fp16 = not torch.cuda.is_bf16_supported(),\n bf16 = torch.cuda.is_bf16_supported(),\n logging_steps = 1,\n optim = \"adamw_8bit\",\n weight_decay = 0.01,\n lr_scheduler_type = \"linear\",\n seed = 3407,\n output_dir = \"outputs\",\n ),\n)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_38":{"__typename":"Paragraph","id":"a96d61ac0572_38","name":"3f19","type":"P","href":null,"layout":null,"metadata":null,"text":"We use the TrainingArguments class from Transformers to define the training parameters, including batch size, gradient accumulation steps, warmup steps, learning rate, optimizer, weight decay, and other hyperparameters.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":11,"end":28,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_39":{"__typename":"Paragraph","id":"a96d61ac0572_39","name":"d2f9","type":"H4","href":null,"layout":null,"metadata":null,"text":"Starting the Training","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_40":{"__typename":"Paragraph","id":"a96d61ac0572_40","name":"98c0","type":"PRE","href":null,"layout":null,"metadata":null,"text":"trainer_stats = trainer.train()","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"ini"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_41":{"__typename":"Paragraph","id":"a96d61ac0572_41","name":"5401","type":"P","href":null,"layout":null,"metadata":null,"text":"This will start the training loop, iterating over the training dataset and updating the model’s parameters based on the defined training arguments.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_42":{"__typename":"Paragraph","id":"a96d61ac0572_42","name":"1c10","type":"H4","href":null,"layout":null,"metadata":null,"text":"Inference","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_43":{"__typename":"Paragraph","id":"a96d61ac0572_43","name":"0952","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we enable native faster inference for the fine-tuned model and define a function for generating Cypher queries.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_44":{"__typename":"Paragraph","id":"a96d61ac0572_44","name":"9a0b","type":"PRE","href":null,"layout":null,"metadata":null,"text":"FastLanguageModel.for_inference(model) # Enable native 2x faster inference\ndef generate_cypher_query(question):\n inputs = tokenizer(\n [\n prompt.format(\n f\"Convert text to cypher query based on this schema: {graph.schema}\", # instruction\n question, # input\n \"\", # output - leave this blank for generation!\n )\n ], return_tensors = \"pt\").to(\"cuda\")\n\n outputs = model.generate(**inputs, max_new_tokens = 64, use_cache = True)\n result = tokenizer.batch_decode(outputs)\n cypher_query = result[0].split(\"### Response:\")[1].split(\"###\")[0].strip().replace(\"\u003C|end_of_text|\u003E\", \"\").replace(\"\u003Ceos\u003E\", \"\").replace(\"{{\", \"{\").replace(\"}}\", \"}\")\n return cypher_query\n\nquestion = \"Write your question here ..\"\ncypher_query = generate_cypher_query(question)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"scss"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_45":{"__typename":"Paragraph","id":"a96d61ac0572_45","name":"9628","type":"P","href":null,"layout":null,"metadata":null,"text":"The generate_cypher_query function takes a natural language question as input, formats it according to the Alpaca prompt template, and uses the fine-tuned model to generate a Cypher query. The generated query is then extracted from the model's output and cleaned up.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":4,"end":25,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_46":{"__typename":"Paragraph","id":"a96d61ac0572_46","name":"8a96","type":"H4","href":null,"layout":null,"metadata":null,"text":"Saving the Model","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_47":{"__typename":"Paragraph","id":"a96d61ac0572_47","name":"7f3e","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we save the fine-tuned model in the GGUF format. We can choose to save the model in 8-bit quantized format (Q8_0), 16-bit format (f16), or other quantized formats like q4_k_m, depending on the desired trade-off between model size and performance.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":41,"end":52,"href":"https:\u002F\u002Fhuggingface.co\u002Fdocs\u002Fhub\u002Fen\u002Fgguf","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":113,"end":117,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":173,"end":179,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_48":{"__typename":"Paragraph","id":"a96d61ac0572_48","name":"0761","type":"PRE","href":null,"layout":null,"metadata":null,"text":"# Save to 8bit Q8_0\nif True: model.save_pretrained_gguf(\"model\", tokenizer,)\n# or Save to 16bit GGUF\nif False: model.save_pretrained_gguf(\"model\", tokenizer, quantization_method = \"f16\")\n# or Save to q4_k_m GGUF\nif False: model.save_pretrained_gguf(\"model\", tokenizer, quantization_method = \"q4_k_m\") # you can use any other format not only \"q4_k_m\"","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_49":{"__typename":"Paragraph","id":"a96d61ac0572_49","name":"9cda","type":"H3","href":null,"layout":null,"metadata":null,"text":"Deploying the Model and Creating an OpenAI-Compatible API Endpoint","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_50":{"__typename":"Paragraph","id":"a96d61ac0572_50","name":"334d","type":"H4","href":null,"layout":null,"metadata":null,"text":"Installing and Creating the Model with Ollama :","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_51":{"__typename":"Paragraph","id":"a96d61ac0572_51","name":"e155","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we install Ollama, a tool for serving LLMs .","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":16,"end":22,"href":"https:\u002F\u002Fgithub.com\u002Follama\u002Follama","anchorType":"LINK","userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_52":{"__typename":"Paragraph","id":"a96d61ac0572_52","name":"f8f3","type":"PRE","href":null,"layout":null,"metadata":null,"text":"curl -fsSL https:\u002F\u002Follama.com\u002Finstall.sh | sh","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"bash"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_53":{"__typename":"Paragraph","id":"a96d61ac0572_53","name":"3c50","type":"P","href":null,"layout":null,"metadata":null,"text":"And create a Modelfile that specifies the path to the saved GGUF model.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":13,"end":22,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":60,"end":64,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_54":{"__typename":"Paragraph","id":"a96d61ac0572_54","name":"cad1","type":"PRE","href":null,"layout":null,"metadata":null,"text":"nano Modelfile","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"bash"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_55":{"__typename":"Paragraph","id":"a96d61ac0572_55","name":"035b","type":"P","href":null,"layout":null,"metadata":null,"text":"The Modelfile containg:","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"STRONG","start":4,"end":13,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_56":{"__typename":"Paragraph","id":"a96d61ac0572_56","name":"5b82","type":"PRE","href":null,"layout":null,"metadata":null,"text":"FROM \u002Fpath\u002Fto\u002Fmodel.gguf","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"css"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_57":{"__typename":"Paragraph","id":"a96d61ac0572_57","name":"0de4","type":"P","href":null,"layout":null,"metadata":null,"text":"We then use the ollama create command to build the model for serving.","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"CODE","start":16,"end":29,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_58":{"__typename":"Paragraph","id":"a96d61ac0572_58","name":"f225","type":"PRE","href":null,"layout":null,"metadata":null,"text":"ollama create llama3.1-cypher","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"lua"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_59":{"__typename":"Paragraph","id":"a96d61ac0572_59","name":"4856","type":"H4","href":null,"layout":null,"metadata":null,"text":"Starting the Server :","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_60":{"__typename":"Paragraph","id":"a96d61ac0572_60","name":"2b54","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we start the Ollama server, which will make the fine-tuned model accessible via an API endpoint.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_61":{"__typename":"Paragraph","id":"a96d61ac0572_61","name":"81bb","type":"PRE","href":null,"layout":null,"metadata":null,"text":"ollama serve","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"typescript"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_62":{"__typename":"Paragraph","id":"a96d61ac0572_62","name":"0f3f","type":"H4","href":null,"layout":null,"metadata":null,"text":"Testing the API","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_63":{"__typename":"Paragraph","id":"a96d61ac0572_63","name":"f253","type":"P","href":null,"layout":null,"metadata":null,"text":"Here we demonstrate how to interact with the deployed model using the OpenAI API client. We initialize the client with the URL of the Ollama server and send a chat completion request with a natural language question.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_64":{"__typename":"Paragraph","id":"a96d61ac0572_64","name":"43c1","type":"PRE","href":null,"layout":null,"metadata":null,"text":"pip install openai","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"EXPLICIT","lang":"bash"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_65":{"__typename":"Paragraph","id":"a96d61ac0572_65","name":"ff82","type":"PRE","href":null,"layout":null,"metadata":null,"text":"\nfrom openai import OpenAI\n\nclient = OpenAI(\n base_url = 'http:\u002F\u002F127.0.0.1:11434\u002Fv1',\n api_key='ollama', # required, but unused\n)\n\nresponse = client.chat.completions.create(\n model=\"llama3.1-cypher\",\n messages=[\n {\"role\": \"user\", \"content\": \"Write your question here .. \"},\n ]\n)\nprint(response.choices[0].message.content)","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":{"__typename":"CodeBlockMetadata","mode":"AUTO","lang":"python"},"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_66":{"__typename":"Paragraph","id":"a96d61ac0572_66","name":"2362","type":"P","href":null,"layout":null,"metadata":null,"text":"The server will use the fine-tuned model to generate a Cypher query and return it as part of the API response.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_67":{"__typename":"Paragraph","id":"a96d61ac0572_67","name":"8f5a","type":"P","href":null,"layout":null,"metadata":null,"text":"With our Llama 3.1 model now fine-tuned and deployed as an OpenAI-compatible API endpoint, we possess a powerful tool for translating natural language questions into Cypher queries. This capability lays the groundwork for building a sophisticated question-answering system capable of extracting valuable insights from our graph database. In the final part of this series, we’ll explore how to integrate this fine-tuned model with a knowledge extraction component to create a comprehensive Q&A system that empowers users to interact with their data using natural language.","hasDropCap":null,"dropCapImage":null,"markups":[],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_68":{"__typename":"Paragraph","id":"a96d61ac0572_68","name":"2deb","type":"P","href":null,"layout":null,"metadata":null,"text":"My LinkedIn , HuggingFace","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":3,"end":11,"href":"https:\u002F\u002Fwww.linkedin.com\u002Fin\u002Fayoub-kirouane3","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"A","start":14,"end":25,"href":"https:\u002F\u002Fhuggingface.co\u002Fayoubkirouane","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":0,"end":11,"href":null,"anchorType":null,"userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":14,"end":25,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"Paragraph:a96d61ac0572_69":{"__typename":"Paragraph","id":"a96d61ac0572_69","name":"e45e","type":"P","href":null,"layout":null,"metadata":null,"text":"InfinitGraph website , Linkedin","hasDropCap":null,"dropCapImage":null,"markups":[{"__typename":"Markup","type":"A","start":13,"end":20,"href":"https:\u002F\u002Finfinitgraph.ai\u002F","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"A","start":23,"end":31,"href":"https:\u002F\u002Fwww.linkedin.com\u002Fcompany\u002Finfinitgraph","anchorType":"LINK","userId":null,"linkMetadata":null},{"__typename":"Markup","type":"STRONG","start":0,"end":12,"href":null,"anchorType":null,"userId":null,"linkMetadata":null}],"codeBlockMetadata":null,"iframe":null,"mixtapeMetadata":null},"CollectionViewerEdge:collectionId:12ed059500cc-viewerId:lo_409b16ad0044":{"__typename":"CollectionViewerEdge","id":"collectionId:12ed059500cc-viewerId:lo_409b16ad0044","isEditor":false,"isMuting":false},"PostViewerEdge:postId:d48f58f8aefe-viewerId:lo_409b16ad0044":{"__typename":"PostViewerEdge","shouldIndexPostForExternalSearch":true,"id":"postId:d48f58f8aefe-viewerId:lo_409b16ad0044"},"Post:d48f58f8aefe":{"__typename":"Post","id":"d48f58f8aefe","collection":{"__ref":"Collection:12ed059500cc"},"content({\"postMeteringOptions\":{}})":{"__typename":"PostContent","isLockedPreviewOnly":false,"bodyModel":{"__typename":"RichText","sections":[{"__typename":"Section","name":"75c7","startIndex":0,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null},{"__typename":"Section","name":"a8f1","startIndex":67,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null},{"__typename":"Section","name":"9d8a","startIndex":68,"textLayout":null,"imageLayout":null,"backgroundImage":null,"videoLayout":null,"backgroundVideo":null}],"paragraphs":[{"__ref":"Paragraph:a96d61ac0572_0"},{"__ref":"Paragraph:a96d61ac0572_1"},{"__ref":"Paragraph:a96d61ac0572_2"},{"__ref":"Paragraph:a96d61ac0572_3"},{"__ref":"Paragraph:a96d61ac0572_4"},{"__ref":"Paragraph:a96d61ac0572_5"},{"__ref":"Paragraph:a96d61ac0572_6"},{"__ref":"Paragraph:a96d61ac0572_7"},{"__ref":"Paragraph:a96d61ac0572_8"},{"__ref":"Paragraph:a96d61ac0572_9"},{"__ref":"Paragraph:a96d61ac0572_10"},{"__ref":"Paragraph:a96d61ac0572_11"},{"__ref":"Paragraph:a96d61ac0572_12"},{"__ref":"Paragraph:a96d61ac0572_13"},{"__ref":"Paragraph:a96d61ac0572_14"},{"__ref":"Paragraph:a96d61ac0572_15"},{"__ref":"Paragraph:a96d61ac0572_16"},{"__ref":"Paragraph:a96d61ac0572_17"},{"__ref":"Paragraph:a96d61ac0572_18"},{"__ref":"Paragraph:a96d61ac0572_19"},{"__ref":"Paragraph:a96d61ac0572_20"},{"__ref":"Paragraph:a96d61ac0572_21"},{"__ref":"Paragraph:a96d61ac0572_22"},{"__ref":"Paragraph:a96d61ac0572_23"},{"__ref":"Paragraph:a96d61ac0572_24"},{"__ref":"Paragraph:a96d61ac0572_25"},{"__ref":"Paragraph:a96d61ac0572_26"},{"__ref":"Paragraph:a96d61ac0572_27"},{"__ref":"Paragraph:a96d61ac0572_28"},{"__ref":"Paragraph:a96d61ac0572_29"},{"__ref":"Paragraph:a96d61ac0572_30"},{"__ref":"Paragraph:a96d61ac0572_31"},{"__ref":"Paragraph:a96d61ac0572_32"},{"__ref":"Paragraph:a96d61ac0572_33"},{"__ref":"Paragraph:a96d61ac0572_34"},{"__ref":"Paragraph:a96d61ac0572_35"},{"__ref":"Paragraph:a96d61ac0572_36"},{"__ref":"Paragraph:a96d61ac0572_37"},{"__ref":"Paragraph:a96d61ac0572_38"},{"__ref":"Paragraph:a96d61ac0572_39"},{"__ref":"Paragraph:a96d61ac0572_40"},{"__ref":"Paragraph:a96d61ac0572_41"},{"__ref":"Paragraph:a96d61ac0572_42"},{"__ref":"Paragraph:a96d61ac0572_43"},{"__ref":"Paragraph:a96d61ac0572_44"},{"__ref":"Paragraph:a96d61ac0572_45"},{"__ref":"Paragraph:a96d61ac0572_46"},{"__ref":"Paragraph:a96d61ac0572_47"},{"__ref":"Paragraph:a96d61ac0572_48"},{"__ref":"Paragraph:a96d61ac0572_49"},{"__ref":"Paragraph:a96d61ac0572_50"},{"__ref":"Paragraph:a96d61ac0572_51"},{"__ref":"Paragraph:a96d61ac0572_52"},{"__ref":"Paragraph:a96d61ac0572_53"},{"__ref":"Paragraph:a96d61ac0572_54"},{"__ref":"Paragraph:a96d61ac0572_55"},{"__ref":"Paragraph:a96d61ac0572_56"},{"__ref":"Paragraph:a96d61ac0572_57"},{"__ref":"Paragraph:a96d61ac0572_58"},{"__ref":"Paragraph:a96d61ac0572_59"},{"__ref":"Paragraph:a96d61ac0572_60"},{"__ref":"Paragraph:a96d61ac0572_61"},{"__ref":"Paragraph:a96d61ac0572_62"},{"__ref":"Paragraph:a96d61ac0572_63"},{"__ref":"Paragraph:a96d61ac0572_64"},{"__ref":"Paragraph:a96d61ac0572_65"},{"__ref":"Paragraph:a96d61ac0572_66"},{"__ref":"Paragraph:a96d61ac0572_67"},{"__ref":"Paragraph:a96d61ac0572_68"},{"__ref":"Paragraph:a96d61ac0572_69"}]},"validatedShareKey":"","shareKeyCreator":null},"creator":{"__ref":"User:4751fd7878c5"},"inResponseToEntityType":null,"isLocked":false,"isMarkedPaywallOnly":false,"lockedSource":"LOCKED_POST_SOURCE_NONE","mediumUrl":"https:\u002F\u002Fmedium.com\u002Finfinitgraph\u002Fbuilding-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe","primaryTopic":null,"topics":[{"__typename":"Topic","slug":"machine-learning"},{"__typename":"Topic","slug":"programming"}],"isPublished":true,"latestPublishedVersion":"a96d61ac0572","visibility":"PUBLIC","postResponses":{"__typename":"PostResponses","count":0},"clapCount":55,"allowResponses":true,"isLimitedState":false,"title":"Building a robust GraphRAG System for a specific use case -Part Two-","isSeries":false,"sequence":null,"uniqueSlug":"building-a-robust-graphrag-system-for-a-specific-use-case-part-two-d48f58f8aefe","socialTitle":"","socialDek":"","canonicalUrl":"","metaDescription":"","latestPublishedAt":1726046189641,"readingTime":7.761320754716981,"previewContent":{"__typename":"PreviewContent","subtitle":"Part Two: The Fine-Tuning Process"},"previewImage":{"__ref":"ImageMetadata:0*P6kJ1FqVWiSWEnRd.png"},"isShortform":false,"seoTitle":"","firstPublishedAt":1726040766542,"updatedAt":1731785769394,"shortformType":"SHORTFORM_TYPE_LINK","seoDescription":"","viewerEdge":{"__ref":"PostViewerEdge:postId:d48f58f8aefe-viewerId:lo_409b16ad0044"},"isSuspended":false,"license":"ALL_RIGHTS_RESERVED","tags":[],"isNewsletter":false,"statusForCollection":"APPROVED","pendingCollection":null,"detectedLanguage":"en","wordCount":1911,"layerCake":0,"responsesLocked":false}}</script><script>window.__MIDDLEWARE_STATE__={"session":{"xsrf":""},"cache":{"cacheStatus":"MISS"}}</script><script src="https://cdn-client.medium.com/lite/static/js/manifest.aa9242f7.js"></script><script src="https://cdn-client.medium.com/lite/static/js/9865.1496d74a.js"></script><script src="https://cdn-client.medium.com/lite/static/js/main.e556b4ac.js"></script><script src="https://cdn-client.medium.com/lite/static/js/instrumentation.d9108df7.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/reporting.ff22a7a5.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/9120.5df29668.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5049.d1ead72d.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/4810.6318add7.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/6618.db187378.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2707.b0942613.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/9977.5b3eb23a.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8599.1ab63137.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5250.9f9e01d2.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5787.e66a3a4d.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2648.26563adf.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8393.826a25fb.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/3104.c3413b66.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/3735.afb7e926.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/5642.8ad8a900.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/6546.cd03f950.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/6834.08de95de.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/7346.72622eb9.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2420.2a5e2d95.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/839.ca7937c2.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/7975.d195c6f1.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2106.21ff89d3.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/7394.094844de.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2961.00a48598.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8204.c4082863.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/4391.59acaed3.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/PostPage.MainContent.1387c5dc.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/8414.6565ad5f.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/3974.8d3e0217.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/2527.a0afad8a.chunk.js"></script> <script src="https://cdn-client.medium.com/lite/static/js/PostResponsesContent.36c2ecf4.chunk.js"></script><script>window.main();</script><script>(function(){function c(){var b=a.contentDocument||a.contentWindow.document;if(b){var d=b.createElement('script');d.innerHTML="window.__CF$cv$params={r:'8e9551ab0c714a7d',t:'MTczMjc0NDA3MS4wMDAwMDA='};var a=document.createElement('script');a.nonce='';a.src='/cdn-cgi/challenge-platform/scripts/jsd/main.js';document.getElementsByTagName('head')[0].appendChild(a);";b.getElementsByTagName('head')[0].appendChild(d)}}if(document.body){var a=document.createElement('iframe');a.height=1;a.width=1;a.style.position='absolute';a.style.top=0;a.style.left=0;a.style.border='none';a.style.visibility='hidden';document.body.appendChild(a);if('loading'!==document.readyState)c();else if(window.addEventListener)document.addEventListener('DOMContentLoaded',c);else{var e=document.onreadystatechange||function(){};document.onreadystatechange=function(b){e(b);'loading'!==document.readyState&&(document.onreadystatechange=e,c())}}}})();</script></body></html>

Pages: 1 2 3 4 5 6 7 8 9 10