CINXE.COM
sppr 2022 - Accepted Papers
<!DOCTYPE html><html lang="en-US" itemscope itemtype="http://schema.org/WebPage"><head><meta charset="utf-8"><script nonce="-R1CwJytIrdmm_Hp_93n9Q">var DOCS_timing={}; DOCS_timing['sl']=new Date().getTime();</script><script nonce="-R1CwJytIrdmm_Hp_93n9Q">function _DumpException(e) {throw e;}</script><script data-id="_gd" nonce="-R1CwJytIrdmm_Hp_93n9Q">window.WIZ_global_data = {"K1cgmc":"%.@.[null,null,null,[null,1,[1732003404,595764000]]]]","nQyAE":{}};</script><script nonce="-R1CwJytIrdmm_Hp_93n9Q">_docs_flag_initialData={"atari-emtpr":false,"atari-eibrm":false,"docs-text-elei":false,"docs-text-usc":true,"atari-bae":false,"docs-text-emtps":true,"docs-text-etsrdpn":false,"docs-text-etsrds":false,"docs-text-endes":false,"docs-text-escpv":true,"docs-text-ecfs":false,"docs-text-ecis":true,"docs-text-eectfs":true,"docs-text-edctzs":true,"docs-text-eetxpc":false,"docs-text-eetxp":false,"docs-text-ertkmcp":true,"docs-text-ettctvs":false,"docs-text-ettts":true,"docs-text-escoubs":false,"docs-text-escivs":false,"docs-text-escitrbs":false,"docs-text-ecgvd":false,"docs-text-esbbcts":true,"docs-text-etccdts":false,"docs-text-etcchrs":false,"docs-text-etctrs":false,"docs-text-eltbbs":true,"docs-text-ecvdis":false,"docs-text-elaiabbs":false,"docs-text-eiosmc":false,"docs-text-ecslpo":false,"docs-text-etb":false,"docs-text-esbefr":false,"docs-text-ipi":false,"docs-etshc":false,"docs-text-tbcb":2.0E7,"docs-efsmsdl":false,"docs-text-etof":false,"docs-text-ehlb":false,"docs-text-epa":true,"docs-text-dwit":false,"docs-text-elawp":false,"docs-eec":false,"docs-ecot":"","docs-text-enbcr":false,"docs-sup":"","umss":false,"docs-eldi":false,"docs-dli":false,"docs-liap":"/logImpressions","ilcm":{"eui":"AHKXmL2SMhTVHFHe1thnrWO8lu2r6djXVwQI3ok-FyoxTcTGCgs_jHlbRYcnBHxyor5XHd39Mgu8","je":1,"sstu":1732340170010964,"si":"CNXi2oPe8YkDFZkQbwYdk2QIUw","gsc":null,"ei":[5703839,5704621,5706832,5706836,5707711,5737784,5737800,5738513,5738529,5740798,5740814,5743108,5743124,5747261,5748013,5748029,57526
78,5752694,5753313,5753329,5754213,5754229,5755080,5755096,5758807,5758823,5762243,5762259,5764252,5764268,5765535,5765551,5766761,5766777,5773662,5773678,5774331,5774347,5774836,5774852,5776501,5776517,5784931,5784947,5784951,5784967,5791766,5791782,5796457,5796473,14101306,14101502,14101510,14101534,49372435,49372443,49375314,49375322,49472063,49472071,49622823,49622831,49623173,49623181,49643568,49643576,49644015,49644023,49769337,49769345,49822921,49822929,49823164,49823172,49833462,49833470,49842855,49842863,49924706,49924714,50221720,50221728,50266222,50266230,50273528,50273536,50297076,50297084,50297426,50297434,50498907,50498915,50529103,50529111,50561333,50561341,50586962,50586970,70971256,70971264,71035517,71035525,71038255,71038263,71079938,71079946,71085241,71085249,71185170,71185178,71197826,71197834,71238946,71238954,71289146,71289154,71387889,71387897,71429507,71429515,71478200,71478208,71478589,71478597,71502841,71502849,71528597,71528605,71530083,71530091,71544834,71544842,71545513,71545521,71546425,71546433,71560069,71560077,71561541,71561549,71573870,71573878,71642103,71642111,71652840,71652848,71658040,71658048,71659813,71659821,71689860,71689868,71699841,71699849,71720760,71721087,71721095,71733073,71733081,71798420,71798436,71798440,71798456,71849655,71849663,71882106,71882114,71897827,71897835,71960540,71960548,71961126,71961134,94353368,94353376,94390143,94390151,94397741,94397749,94413607,94413615,94420737,94420745,94434257,94434265,94435578,94435586,94444282,94444290,94484634,94484642,94489858,94489866,94502654,94502662,94526768,94526776,94545004,94545012,94597639,94597647,94630911,94661802,94661810,94707424,94707432,94784571,94784579,94875009,94875017,94904089,94904097,94929210,94929218,94942490,94942498,95065889,95065897,95087186,95087194,95112873,95112881,95118561,95118569,95135933,95135941,95234185,95234871,95234879,95251262,95251270,95254920,95254928,95266740,95266748,95270945,95270953,95271343,95271351,95314802,95314810,95317985,99237
681,99237689,99247596,99247604,99310979,99310987,99338440,99338448,99368792,99368800,99401881,99401889,99402331,99402339,99437441,99437449,99460069,100130662,100130678,101406734,101406742,101442805,101442813,101456452,101456460,101488823,101488831,101489187,101489195,101507186,101507194,101519280,101519288,101544667,101544675,101606928,101606936,101617516,101617524,101631040,101631048,101705089,101708583,101708591,101771970,101771978,101776366,101776374,101783430,101783446],"crc":0,"cvi":[]},"docs-ccdil":false,"docs-eil":true,"info_params":{},"buildLabel":"editors.sites-viewer-frontend_20241112.02_p1","docs-show_debug_info":false,"atari-jefp":"/_/view/jserror","docs-jern":"view","atari-rhpp":"/_/view","docs-ecuach":false,"docs-cclt":2033,"docs-ecci":true,"docs-esi":false,"docs-efypr":true,"docs-eyprp":true}; _docs_flag_cek= null ; if (window['DOCS_timing']) {DOCS_timing['ifdld']=new Date().getTime();}</script><meta name="viewport" content="width=device-width, initial-scale=1"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="referrer" content="origin"><link rel="icon" href="https://lh3.googleusercontent.com/gDb1QI7OiN5jEvAU61kaiknhuPKfa4qyvZpUdWN0peeLW5AFvhYq1SqXoWPRUIHKEVi41-H4tlFU974GEiEE7_gd5CcIWmthNLcr-u-Ma_O3kQ"><meta property="og:title" content="sppr 2022 - Accepted Papers"><meta property="og:type" content="website"><meta property="og:url" content="https://sites.google.com/view/sppr-2022/accepted-papers"><meta property="og:description" content=" Accepted Papers Depression Detection using Machine and Deep Learning Models to Assess Mental Health of Social Media Users Smita Ghosh1, Sneha Ghosh2, Diptaraj Sen2, Pramita Das3, 1Department of Mathematics and Computer Science, Santa Clara University, California, USA, 2Department of Computer"><meta itemprop="name" content="sppr 2022 - Accepted Papers"><meta itemprop="description" content=" Accepted Papers Depression Detection using Machine and Deep Learning Models to Assess Mental Health of Social Media 
Users Smita Ghosh1, Sneha Ghosh2, Diptaraj Sen2, Pramita Das3, 1Department of Mathematics and Computer Science, Santa Clara University, California, USA, 2Department of Computer"><meta itemprop="url" content="https://sites.google.com/view/sppr-2022/accepted-papers"><meta itemprop="thumbnailUrl" content="https://lh6.googleusercontent.com/lWnc0LyRNC7ppGvzhtZyNToMWHay6_vSvgabGJZ0Vv06P0zUd1KNRjknhvXuXSzZIqStswLFknAJcU5vQbbqbM0=w16383"><meta itemprop="image" content="https://lh6.googleusercontent.com/lWnc0LyRNC7ppGvzhtZyNToMWHay6_vSvgabGJZ0Vv06P0zUd1KNRjknhvXuXSzZIqStswLFknAJcU5vQbbqbM0=w16383"><meta itemprop="imageUrl" content="https://lh6.googleusercontent.com/lWnc0LyRNC7ppGvzhtZyNToMWHay6_vSvgabGJZ0Vv06P0zUd1KNRjknhvXuXSzZIqStswLFknAJcU5vQbbqbM0=w16383"><meta property="og:image" content="https://lh6.googleusercontent.com/lWnc0LyRNC7ppGvzhtZyNToMWHay6_vSvgabGJZ0Vv06P0zUd1KNRjknhvXuXSzZIqStswLFknAJcU5vQbbqbM0=w16383"><link href="https://fonts.googleapis.com/css?family=Lato%3A300%2C300italic%2C400%2C400italic%2C700%2C700italic&display=swap" rel="stylesheet" nonce="sCzgJ16LSIMruEOJW4MZ9A"><link href="https://fonts.googleapis.com/css?family=Google+Sans:400,500|Roboto:300,400,500,700|Source+Code+Pro:400,700&display=swap" rel="stylesheet" nonce="sCzgJ16LSIMruEOJW4MZ9A"><link rel="stylesheet" href="https://www.gstatic.com/_/atari/_/ss/k=atari.vw.7R57rYYVGiA.L.X.O/am=MBiA/d=1/rs=AGEqA5nH97dOwqY0iGblvGlWiaR59owncA" data-id="_cl" nonce="sCzgJ16LSIMruEOJW4MZ9A"><script nonce="-R1CwJytIrdmm_Hp_93n9Q"></script><title>sppr 2022 - Accepted Papers</title><style jsname="ptDGoc" nonce="sCzgJ16LSIMruEOJW4MZ9A">.M63kCb{background-color: rgba(255,255,255,1);}.OUGEr{color: rgba(33,33,33,1);}.duRjpb .OUGEr{color: rgba(34,110,147,1);}.JYVBee .OUGEr{color: rgba(34,110,147,1);}.OmQG5e .OUGEr{color: rgba(33,33,33,1);}.iwQgFb{background-color: rgba(0,0,0,0.150000006);}.ySLm4c{font-family: Lato, sans-serif;}.CbiMKe{background-color: rgba(30,108,147,1);}.qeLZfd .zfr3Q{color: 
rgba(33,33,33,1);}.qeLZfd .qnVSj{color: rgba(33,33,33,1);}.qeLZfd .Glwbz{color: rgba(33,33,33,1);}.qeLZfd .duRjpb{color: rgba(34,110,147,1);}.qeLZfd .qLrapd{color: rgba(34,110,147,1);}.qeLZfd .JYVBee{color: rgba(34,110,147,1);}.qeLZfd .aHM7ed{color: rgba(34,110,147,1);}.qeLZfd .OmQG5e{color: rgba(33,33,33,1);}.qeLZfd .NHD4Gf{color: rgba(33,33,33,1);}.qeLZfd .aw5Odc{color: rgba(0,101,128,1);}.qeLZfd .dhtgD:hover{color: rgba(0,0,0,1);}.qeLZfd .dhtgD:visited{color: rgba(0,101,128,1);}.qeLZfd .iwQgFb{background-color: rgba(0,0,0,0.150000006);}.qeLZfd .OUGEr{color: rgba(33,33,33,1);}.qeLZfd .duRjpb .OUGEr{color: rgba(34,110,147,1);}.qeLZfd .JYVBee .OUGEr{color: rgba(34,110,147,1);}.qeLZfd .OmQG5e .OUGEr{color: rgba(33,33,33,1);}.qeLZfd:before{background-color: rgba(242,242,242,1); display: block;}.lQAHbd .zfr3Q{color: rgba(255,255,255,1);}.lQAHbd .qnVSj{color: rgba(255,255,255,1);}.lQAHbd .Glwbz{color: rgba(255,255,255,1);}.lQAHbd .duRjpb{color: rgba(255,255,255,1);}.lQAHbd .qLrapd{color: rgba(255,255,255,1);}.lQAHbd .JYVBee{color: rgba(255,255,255,1);}.lQAHbd .aHM7ed{color: rgba(255,255,255,1);}.lQAHbd .OmQG5e{color: rgba(255,255,255,1);}.lQAHbd .NHD4Gf{color: rgba(255,255,255,1);}.lQAHbd .aw5Odc{color: rgba(255,255,255,1);}.lQAHbd .dhtgD:hover{color: rgba(255,255,255,1);}.lQAHbd .dhtgD:visited{color: rgba(255,255,255,1);}.lQAHbd .iwQgFb{background-color: rgba(255,255,255,0.150000006);}.lQAHbd .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .duRjpb .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .JYVBee .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .OmQG5e .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .CbiMKe{background-color: rgba(255,255,255,1);}.lQAHbd:before{background-color: rgba(30,108,147,1); display: block;}.cJgDec .zfr3Q{color: rgba(255,255,255,1);}.cJgDec .zfr3Q .OUGEr{color: rgba(255,255,255,1);}.cJgDec .qnVSj{color: rgba(255,255,255,1);}.cJgDec .Glwbz{color: rgba(255,255,255,1);}.cJgDec .qLrapd{color: rgba(255,255,255,1);}.cJgDec .aHM7ed{color: rgba(255,255,255,1);}.cJgDec 
.NHD4Gf{color: rgba(255,255,255,1);}.cJgDec .IFuOkc:before{background-color: rgba(33,33,33,1); opacity: 0; display: block;}.O13XJf{height: 340px; padding-bottom: 60px; padding-top: 60px;}.O13XJf .IFuOkc{background-color: rgba(34,110,147,1); background-image: url(https://ssl.gstatic.com/atari/images/simple-header-blended-small.png);}.O13XJf .IFuOkc:before{background-color: rgba(33,33,33,1); opacity: 0.4; display: block;}.O13XJf .zfr3Q{color: rgba(255,255,255,1);}.O13XJf .qnVSj{color: rgba(255,255,255,1);}.O13XJf .Glwbz{color: rgba(255,255,255,1);}.O13XJf .duRjpb{color: rgba(255,255,255,1);}.O13XJf .qLrapd{color: rgba(255,255,255,1);}.O13XJf .JYVBee{color: rgba(255,255,255,1);}.O13XJf .aHM7ed{color: rgba(255,255,255,1);}.O13XJf .OmQG5e{color: rgba(255,255,255,1);}.O13XJf .NHD4Gf{color: rgba(255,255,255,1);}.tpmmCb .zfr3Q{color: rgba(33,33,33,1);}.tpmmCb .zfr3Q .OUGEr{color: rgba(33,33,33,1);}.tpmmCb .qnVSj{color: rgba(33,33,33,1);}.tpmmCb .Glwbz{color: rgba(33,33,33,1);}.tpmmCb .qLrapd{color: rgba(33,33,33,1);}.tpmmCb .aHM7ed{color: rgba(33,33,33,1);}.tpmmCb .NHD4Gf{color: rgba(33,33,33,1);}.tpmmCb .IFuOkc:before{background-color: rgba(255,255,255,1); display: block;}.tpmmCb .Wew9ke{fill: rgba(33,33,33,1);}.aw5Odc{color: rgba(0,101,128,1);}.dhtgD:hover{color: rgba(0,122,147,1);}.dhtgD:active{color: rgba(0,122,147,1);}.dhtgD:visited{color: rgba(0,101,128,1);}.Zjiec{color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 19pt; font-weight: 300; letter-spacing: 1px; line-height: 1.3; padding-bottom: 62.5px; padding-left: 48px; padding-right: 36px; padding-top: 11.5px;}.XMyrgf{margin-top: 0px; margin-left: 48px; margin-bottom: 24px; margin-right: 24px;}.TlfmSc{color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 15pt; font-weight: 300; line-height: 1.333;}.Mz8gvb{color: rgba(255,255,255,1);}.zDUgLc{background-color: rgba(33,33,33,1);}.QTKDff.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.YTv4We{color: 
rgba(178,178,178,1);}.YTv4We:hover:before{background-color: rgba(255,255,255,0.1199999973); display: block;}.YTv4We.chg4Jd:focus:before{border-color: rgba(255,255,255,0.3600000143); display: block;}.eWDljc{background-color: rgba(33,33,33,1);}.eWDljc .hDrhEe{padding-left: 8px;}.ZXW7w{color: rgba(255,255,255,1); opacity: 0.26;}.PsKE7e{color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 12pt; font-weight: 300;}.lhZOrc{color: rgba(73,170,212,1);}.hDrhEe:hover{color: rgba(73,170,212,1);}.M9vuGd{color: rgba(73,170,212,1); font-weight: 400;}.jgXgSe:hover{color: rgba(73,170,212,1);}.j10yRb:hover{color: rgba(0,188,212,1);}.j10yRb.chg4Jd:focus:before{border-color: rgba(255,255,255,0.3600000143); display: block;}.tCHXDc{color: rgba(255,255,255,1);}.iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.wgxiMe{background-color: rgba(33,33,33,1);}.fOU46b .TlfmSc{color: rgba(255,255,255,1);}.fOU46b .KJll8d{background-color: rgba(255,255,255,1);}.fOU46b .Mz8gvb{color: rgba(255,255,255,1);}.fOU46b .Mz8gvb.chg4Jd:focus:before{border-color: rgba(255,255,255,1); display: block;}.fOU46b .qV4dIc{color: rgba(255,255,255,0.8700000048);}.fOU46b .jgXgSe:hover{color: rgba(255,255,255,1);}.fOU46b .M9vuGd{color: rgba(255,255,255,1);}.fOU46b .tCHXDc{color: rgba(255,255,255,0.8700000048);}.fOU46b .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.fOU46b .G8QRnc .Mz8gvb{color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .Mz8gvb.chg4Jd:focus:before{border-color: rgba(0,0,0,0.8000000119); display: block;}.fOU46b .G8QRnc .ZXW7w{color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .TlfmSc{color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .KJll8d{background-color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .qV4dIc{color: rgba(0,0,0,0.6399999857);}.fOU46b .G8QRnc .jgXgSe:hover{color: rgba(0,0,0,0.8199999928);}.fOU46b .G8QRnc .M9vuGd{color: rgba(0,0,0,0.8199999928);}.fOU46b .G8QRnc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.fOU46b .G8QRnc 
.iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .usN8rf .Mz8gvb{color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .Mz8gvb.chg4Jd:focus:before{border-color: rgba(0,0,0,0.8000000119); display: block;}.fOU46b .usN8rf .ZXW7w{color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .TlfmSc{color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .KJll8d{background-color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .qV4dIc{color: rgba(0,0,0,0.6399999857);}.fOU46b .usN8rf .jgXgSe:hover{color: rgba(0,0,0,0.8199999928);}.fOU46b .usN8rf .M9vuGd{color: rgba(0,0,0,0.8199999928);}.fOU46b .usN8rf .tCHXDc{color: rgba(0,0,0,0.6399999857);}.fOU46b .usN8rf .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .aCIEDd .qV4dIc{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .TlfmSc{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .KJll8d{background-color: rgba(33,33,33,1);}.fOU46b .aCIEDd .ZXW7w{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .jgXgSe:hover{color: rgba(33,33,33,1); opacity: 0.82;}.fOU46b .aCIEDd .Mz8gvb{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .tCHXDc{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .iWs3gf.chg4Jd:focus{background-color: rgba(33,33,33,0.1199999973);}.fOU46b .a3ETed .qV4dIc{color: rgba(255,255,255,1);}.fOU46b .a3ETed .TlfmSc{color: rgba(255,255,255,1);}.fOU46b .a3ETed .KJll8d{background-color: rgba(255,255,255,1);}.fOU46b .a3ETed .ZXW7w{color: rgba(255,255,255,1);}.fOU46b .a3ETed .jgXgSe:hover{color: rgba(255,255,255,1); opacity: 0.82;}.fOU46b .a3ETed .Mz8gvb{color: rgba(255,255,255,1);}.fOU46b .a3ETed .tCHXDc{color: rgba(255,255,255,1);}.fOU46b .a3ETed .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}@media only screen and (min-width: 1280px){.XeSM4.b2Iqye.fOU46b .LBrwzc .tCHXDc{color: rgba(255,255,255,0.8700000048);}}.XeSM4.b2Iqye.fOU46b .LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}@media only screen and (min-width: 1280px){.KuNac.b2Iqye.fOU46b .tCHXDc{color: 
rgba(0,0,0,0.6399999857);}}.KuNac.b2Iqye.fOU46b .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .zDUgLc{opacity: 0;}.LBrwzc .ZXW7w{color: rgba(0,0,0,1);}.LBrwzc .KJll8d{background-color: rgba(0,0,0,1);}.GBy4H .ZXW7w{color: rgba(255,255,255,1);}.GBy4H .KJll8d{background-color: rgba(255,255,255,1);}.eBSUbc{background-color: rgba(33,33,33,1); color: rgba(0,188,212,0.6999999881);}.BFDQOb:hover{color: rgba(73,170,212,1);}.ImnMyf{background-color: rgba(255,255,255,1); color: rgba(33,33,33,1);}.Vs12Bd{background-color: rgba(242,242,242,1); color: rgba(33,33,33,1);}.S5d9Rd{background-color: rgba(30,108,147,1); color: rgba(255,255,255,1);}.zfr3Q{color: rgba(33,33,33,1); font-family: Lato, sans-serif; font-size: 11pt; font-weight: 400; line-height: 1.6667; margin-top: 12px;}.qnVSj{color: rgba(33,33,33,1);}.Glwbz{color: rgba(33,33,33,1);}.duRjpb{color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 34pt; font-weight: 300; letter-spacing: 0.5px; line-height: 1.2; margin-top: 30px;}.Ap4VC{margin-bottom: -30px;}.qLrapd{color: rgba(34,110,147,1);}.JYVBee{color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 19pt; font-weight: 400; line-height: 1.4; margin-top: 20px;}.CobnVe{margin-bottom: -20px;}.aHM7ed{color: rgba(34,110,147,1);}.OmQG5e{color: rgba(33,33,33,1); font-family: Lato, sans-serif; font-size: 15pt; font-style: normal; font-weight: 400; line-height: 1.25; margin-top: 16px;}.GV3q8e{margin-bottom: -16px;}.NHD4Gf{color: rgba(33,33,33,1);}.LB7kq .duRjpb{font-size: 64pt; letter-spacing: 2px; line-height: 1; margin-top: 40px;}.LB7kq .JYVBee{font-size: 25pt; font-weight: 300; line-height: 1.1; margin-top: 25px;}@media only screen and (max-width: 479px){.LB7kq .duRjpb{font-size: 40pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.LB7kq .duRjpb{font-size: 53pt;}}@media only screen and (max-width: 479px){.LB7kq .JYVBee{font-size: 19pt;}}@media only screen and (min-width: 480px) and (max-width: 
767px){.LB7kq .JYVBee{font-size: 22pt;}}.O13XJf{height: 340px; padding-bottom: 60px; padding-top: 60px;}@media only screen and (min-width: 480px) and (max-width: 767px){.O13XJf{height: 280px; padding-bottom: 40px; padding-top: 40px;}}@media only screen and (max-width: 479px){.O13XJf{height: 250px; padding-bottom: 30px; padding-top: 30px;}}.SBrW1{height: 520px;}@media only screen and (min-width: 480px) and (max-width: 767px){.SBrW1{height: 520px;}}@media only screen and (max-width: 479px){.SBrW1{height: 400px;}}.Wew9ke{fill: rgba(255,255,255,1);}.gk8rDe{height: 180px; padding-bottom: 32px; padding-top: 60px;}.gk8rDe .zfr3Q{color: rgba(0,0,0,1);}.gk8rDe .duRjpb{color: rgba(34,110,147,1); font-size: 45pt; line-height: 1.1;}.gk8rDe .qLrapd{color: rgba(34,110,147,1);}.gk8rDe .JYVBee{color: rgba(34,110,147,1); font-size: 27pt; line-height: 1.35; margin-top: 15px;}.gk8rDe .aHM7ed{color: rgba(34,110,147,1);}.gk8rDe .OmQG5e{color: rgba(33,33,33,1);}.gk8rDe .NHD4Gf{color: rgba(33,33,33,1);}@media only screen and (max-width: 479px){.gk8rDe .duRjpb{font-size: 30pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.gk8rDe .duRjpb{font-size: 38pt;}}@media only screen and (max-width: 479px){.gk8rDe .JYVBee{font-size: 20pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.gk8rDe .JYVBee{font-size: 24pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.gk8rDe{padding-top: 45px;}}@media only screen and (max-width: 479px){.gk8rDe{padding-bottom: 0px; padding-top: 30px;}}.dhtgD{text-decoration: underline;}.JzO0Vc{background-color: rgba(33,33,33,1); font-family: Lato, sans-serif; width: 250px;}@media only screen and (min-width: 1280px){.JzO0Vc{padding-top: 48.5px;}}.TlfmSc{font-family: Lato, sans-serif; font-size: 15pt; font-weight: 300; line-height: 1.333;}.PsKE7e{font-family: Lato, sans-serif; font-size: 12pt;}.IKA38e{line-height: 1.21;}.hDrhEe{padding-bottom: 11.5px; padding-top: 11.5px;}.zDUgLc{opacity: 
1;}.QmpIrf{background-color: rgba(30,108,147,1); border-color: rgba(255,255,255,1); color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.xkUom{border-color: rgba(30,108,147,1); color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.xkUom:hover{background-color: rgba(30,108,147,0.1000000015);}.KjwKmc{color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal; line-height: normal;}.KjwKmc:hover{background-color: rgba(30,108,147,0.1000000015);}.lQAHbd .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.lQAHbd .xkUom{border-color: rgba(242,242,242,1); color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.lQAHbd .xkUom:hover{background-color: rgba(255,255,255,0.1000000015);}.lQAHbd .KjwKmc{color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.lQAHbd .KjwKmc:hover{background-color: rgba(255,255,255,0.1000000015);}.lQAHbd .Mt0nFe{border-color: rgba(255,255,255,0.200000003);}.cJgDec .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.cJgDec .xkUom{border-color: rgba(242,242,242,1); color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.cJgDec .xkUom:hover{background-color: rgba(255,255,255,0.1000000015);}.cJgDec .KjwKmc{color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.cJgDec .KjwKmc:hover{background-color: rgba(255,255,255,0.1000000015);}.tpmmCb .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.tpmmCb 
.xkUom{border-color: rgba(30,108,147,1); color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.tpmmCb .xkUom:hover{background-color: rgba(30,108,147,0.1000000015);}.tpmmCb .KjwKmc{color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.tpmmCb .KjwKmc:hover{background-color: rgba(30,108,147,0.1000000015);}.gk8rDe .QmpIrf{background-color: rgba(30,108,147,1); border-color: rgba(255,255,255,1); color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.gk8rDe .xkUom{border-color: rgba(30,108,147,1); color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.gk8rDe .xkUom:hover{background-color: rgba(30,108,147,0.1000000015);}.gk8rDe .KjwKmc{color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.gk8rDe .KjwKmc:hover{background-color: rgba(30,108,147,0.1000000015);}.O13XJf .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.O13XJf .xkUom{border-color: rgba(242,242,242,1); color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.O13XJf .xkUom:hover{background-color: rgba(255,255,255,0.1000000015);}.O13XJf .KjwKmc{color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.O13XJf .KjwKmc:hover{background-color: rgba(255,255,255,0.1000000015);}.Y4CpGd{font-family: Lato, sans-serif; font-size: 11pt;}.CMArNe{background-color: rgba(242,242,242,1);}.LBrwzc .TlfmSc{color: rgba(0,0,0,0.8000000119);}.LBrwzc .YTv4We{color: rgba(0,0,0,0.6399999857);}.LBrwzc .YTv4We.chg4Jd:focus:before{border-color: rgba(0,0,0,0.6399999857); display: block;}.LBrwzc .Mz8gvb{color: rgba(0,0,0,0.6399999857);}.LBrwzc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.LBrwzc .iWs3gf.chg4Jd:focus{background-color: 
rgba(0,0,0,0.1199999973);}.LBrwzc .wgxiMe{background-color: rgba(255,255,255,1);}.LBrwzc .qV4dIc{color: rgba(0,0,0,0.6399999857);}.LBrwzc .M9vuGd{color: rgba(0,0,0,0.8000000119); font-weight: bold;}.LBrwzc .Zjiec{color: rgba(0,0,0,0.8000000119);}.LBrwzc .IKA38e{color: rgba(0,0,0,0.6399999857);}.LBrwzc .lhZOrc.IKA38e{color: rgba(0,0,0,0.8000000119); font-weight: bold;}.LBrwzc .j10yRb:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .eBSUbc{color: rgba(0,0,0,0.8000000119);}.LBrwzc .hDrhEe:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .jgXgSe:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .M9vuGd:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .zDUgLc{border-bottom-color: rgba(204,204,204,1); border-bottom-width: 1px; border-bottom-style: solid;}.fOU46b .LBrwzc .M9vuGd{color: rgba(0,0,0,0.8000000119);}.fOU46b .LBrwzc .jgXgSe:hover{color: rgba(0,0,0,0.8000000119);}.fOU46b .LBrwzc .zDUgLc{opacity: 1; border-bottom-style: none;}.fOU46b .LBrwzc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.fOU46b .LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .GBy4H .M9vuGd{color: rgba(255,255,255,1);}.fOU46b .GBy4H .jgXgSe:hover{color: rgba(255,255,255,1);}.fOU46b .GBy4H .zDUgLc{opacity: 1;}.fOU46b .GBy4H .tCHXDc{color: rgba(255,255,255,0.8700000048);}.fOU46b .GBy4H .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.XeSM4.G9Qloe.fOU46b .LBrwzc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.XeSM4.G9Qloe.fOU46b .LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.GBy4H .lhZOrc.IKA38e{color: rgba(255,255,255,1);}.GBy4H .eBSUbc{color: rgba(255,255,255,0.8700000048);}.GBy4H .hDrhEe:hover{color: rgba(255,255,255,1);}.GBy4H .j10yRb:hover{color: rgba(255,255,255,1);}.GBy4H .YTv4We{color: rgba(255,255,255,1);}.GBy4H .YTv4We.chg4Jd:focus:before{border-color: rgba(255,255,255,1); display: block;}.GBy4H .tCHXDc{color: rgba(255,255,255,0.8700000048);}.GBy4H .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.GBy4H 
.jgXgSe:hover{color: rgba(255,255,255,1);}.GBy4H .jgXgSe:hover{color: rgba(255,255,255,1);}.GBy4H .M9vuGd{color: rgba(255,255,255,1);}.GBy4H .M9vuGd:hover{color: rgba(255,255,255,1);}.QcmuFb{padding-left: 20px;}.vDPrib{padding-left: 40px;}.TBDXjd{padding-left: 60px;}.bYeK8e{padding-left: 80px;}.CuqSDe{padding-left: 100px;}.Havqpe{padding-left: 120px;}.JvDrRe{padding-left: 140px;}.o5lrIf{padding-left: 160px;}.yOJW7c{padding-left: 180px;}.rB8cye{padding-left: 200px;}.RuayVd{padding-right: 20px;}.YzcKX{padding-right: 40px;}.reTV0b{padding-right: 60px;}.vSYeUc{padding-right: 80px;}.PxtZIe{padding-right: 100px;}.ahQMed{padding-right: 120px;}.rzhcXb{padding-right: 140px;}.PBhj0b{padding-right: 160px;}.TlN46c{padding-right: 180px;}.GEdNnc{padding-right: 200px;}.TMjjoe{font-family: Lato, sans-serif; font-size: 9pt; line-height: 1.2; margin-top: 0px;}@media only screen and (min-width: 1280px){.yxgWrb{margin-left: 250px;}}@media only screen and (max-width: 479px){.Zjiec{font-size: 15pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.Zjiec{font-size: 17pt;}}@media only screen and (max-width: 479px){.TlfmSc{font-size: 13pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.TlfmSc{font-size: 14pt;}}@media only screen and (max-width: 479px){.PsKE7e{font-size: 12pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.PsKE7e{font-size: 12pt;}}@media only screen and (max-width: 479px){.duRjpb{font-size: 24pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.duRjpb{font-size: 29pt;}}@media only screen and (max-width: 479px){.JYVBee{font-size: 15pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.JYVBee{font-size: 17pt;}}@media only screen and (max-width: 479px){.OmQG5e{font-size: 13pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.OmQG5e{font-size: 14pt;}}@media only screen and (max-width: 479px){.TlfmSc{font-size: 13pt;}}@media only screen and (min-width: 480px) and 
(max-width: 767px){.TlfmSc{font-size: 14pt;}}@media only screen and (max-width: 479px){.PsKE7e{font-size: 12pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.PsKE7e{font-size: 12pt;}}@media only screen and (max-width: 479px){.TMjjoe{font-size: 9pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.TMjjoe{font-size: 9pt;}}section[id="h.33e574b64c58fbdc_11"] .IFuOkc:before{opacity: 0.6;}section[id="h.33e574b64c58fbdc_227"] .IFuOkc:before{opacity: 0.2;}section[id="h.33e574b64c58fbdc_48"] .IFuOkc:before{opacity: 0.1;}</style><script nonce="-R1CwJytIrdmm_Hp_93n9Q">_at_config = [null,"AIzaSyChg3MFqzdi1P5J-YvEyakkSA1yU7HRcDI","897606708560-a63d8ia0t9dhtpdt4i3djab2m42see7o.apps.googleusercontent.com",null,null,null,null,null,null,null,null,null,null,null,"SITES_%s",null,null,null,null,null,null,null,null,null,["AHKXmL2SMhTVHFHe1thnrWO8lu2r6djXVwQI3ok-FyoxTcTGCgs_jHlbRYcnBHxyor5XHd39Mgu8",1,"CNXi2oPe8YkDFZkQbwYdk2QIUw",1732340170010964,[5703839,5704621,5706832,5706836,5707711,5737784,5737800,5738513,5738529,5740798,5740814,5743108,5743124,5747261,5748013,5748029,5752678,5752694,5753313,5753329,5754213,5754229,5755080,5755096,5758807,5758823,5762243,5762259,5764252,5764268,5765535,5765551,5766761,5766777,5773662,5773678,5774331,5774347,5774836,5774852,5776501,5776517,5784931,5784947,5784951,5784967,5791766,5791782,5796457,5796473,14101306,14101502,14101510,14101534,49372435,49372443,49375314,49375322,49472063,49472071,49622823,49622831,49623173,49623181,49643568,49643576,49644015,49644023,49769337,49769345,49822921,49822929,49823164,49823172,49833462,49833470,49842855,49842863,49924706,49924714,50221720,50221728,50266222,50266230,50273528,50273536,50297076,50297084,50297426,50297434,50498907,50498915,50529103,50529111,50561333,50561341,50586962,50586970,70971256,70971264,71035517,71035525,71038255,71038263,71079938,71079946,71085241,71085249,71185170,71185178,71197826,71197834,71238946,71238954,71289146,71289154,71387889,71387897,71429507
,71429515,71478200,71478208,71478589,71478597,71502841,71502849,71528597,71528605,71530083,71530091,71544834,71544842,71545513,71545521,71546425,71546433,71560069,71560077,71561541,71561549,71573870,71573878,71642103,71642111,71652840,71652848,71658040,71658048,71659813,71659821,71689860,71689868,71699841,71699849,71720760,71721087,71721095,71733073,71733081,71798420,71798436,71798440,71798456,71849655,71849663,71882106,71882114,71897827,71897835,71960540,71960548,71961126,71961134,94353368,94353376,94390143,94390151,94397741,94397749,94413607,94413615,94420737,94420745,94434257,94434265,94435578,94435586,94444282,94444290,94484634,94484642,94489858,94489866,94502654,94502662,94526768,94526776,94545004,94545012,94597639,94597647,94630911,94661802,94661810,94707424,94707432,94784571,94784579,94875009,94875017,94904089,94904097,94929210,94929218,94942490,94942498,95065889,95065897,95087186,95087194,95112873,95112881,95118561,95118569,95135933,95135941,95234185,95234871,95234879,95251262,95251270,95254920,95254928,95266740,95266748,95270945,95270953,95271343,95271351,95314802,95314810,95317985,99237681,99237689,99247596,99247604,99310979,99310987,99338440,99338448,99368792,99368800,99401881,99401889,99402331,99402339,99437441,99437449,99460069,100130662,100130678,101406734,101406742,101442805,101442813,101456452,101456460,101488823,101488831,101489187,101489195,101507186,101507194,101519280,101519288,101544667,101544675,101606928,101606936,101617516,101617524,101631040,101631048,101705089,101708583,101708591,101771970,101771978,101776366,101776374,101783430,101783446]],null,null,null,null,0,null,null,null,null,null,null,null,null,null,"https://drive.google.com",null,null,null,null,null,null,null,null,null,0,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,"v2internal","https://docs.google.
com",null,null,null,null,null,null,"https://sites.google.com/new/",null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,"",null,null,null,null,null,null,null,null,null,null,null,null,6,null,null,"https://accounts.google.com/o/oauth2/auth","https://accounts.google.com/o/oauth2/postmessageRelay",null,null,null,null,78,"https://sites.google.com/new/?usp\u003dviewer_footer",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,"https://www.gstatic.com/atari/embeds/83a60601c213b72fb19c1855fb0c5f26/intermediate-frame-minified.html",0,null,"v2beta",null,null,null,null,null,null,4,"https://accounts.google.com/o/oauth2/iframe",null,null,null,null,null,null,"https://339251111-atari-embeds.googleusercontent.com/embeds/16cb204cf3a9d4d223a0a3fd8b0eec5d/inner-frame-minified.html",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,"https://sites.google.com/view/sppr-2022/accepted-papers",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,0,null,"",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,[1732340170011,"editors.sites-viewer-f
rontend_20241112.02_p1","695977640",null,1,1,""],null,null,null,null,0,null,null,0,null,null,null,null,null,null,null,null,20,500,"https://domains.google.com",null,0,null,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,1,0,1,0,0,0,0,null,null,null,null,null,"https://www.google.com/calendar/embed",null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,"PROD",0,null,0,null,1]; window.globals = {"enableAnalytics":true,"webPropertyId":"","showDebug":false,"hashedSiteId":"9a0bde1eeac560416c183e4363724a8af03275213b2294fc934535ca84b23969","normalizedPath":"view/sppr-2022/accepted-papers","pageTitle":"Accepted Papers"}; function gapiLoaded() {if (globals.gapiLoaded == undefined) {globals.gapiLoaded = true;} else {globals.gapiLoaded();}}window.messages = []; window.addEventListener && window.addEventListener('message', function(e) {if (window.messages && e.data && e.data.magic == 'SHIC') {window.messages.push(e);}});</script><script src="https://apis.google.com/js/client.js?onload=gapiLoaded" nonce="-R1CwJytIrdmm_Hp_93n9Q"></script><script nonce="-R1CwJytIrdmm_Hp_93n9Q">(function(){}).call(this); </script><script nonce="-R1CwJytIrdmm_Hp_93n9Q">const imageUrl = 'https:\/\/lh5.googleusercontent.com\/ojgjvcj3KLOBHR-XNDFxvmn_RE7y1kvhxrmBfL5yARcVCs7R1CX7KsAWsg-sWEYFKAqgDMxbOz4sp3tZu8kBi2c\x3dw16383'; function bgImgLoaded() { if (!globals.headerBgImgLoaded) { globals.headerBgImgLoaded = new Date().getTime(); } else { globals.headerBgImgLoaded(); } } if (imageUrl) { const img = new Image(); img.src = imageUrl; img.onload = bgImgLoaded; globals.headerBgImgExists = true; } else { globals.headerBgImgExists = false; } </script></head><body dir="ltr" itemscope itemtype="http://schema.org/WebPage" id="yDmH0d" css="yDmH0d"><div jscontroller="pc62j" jsmodel="iTeaXe" jsaction="rcuQ6b:WYd;GvneHb:og1FDd;vbaUQc:uAM5ec;"><div id="docs-banner-container"><div 
id="docs-banners"><div id="HB1eCd-mzNpsf-r8s4j-ORHb"></div><div id="HB1eCd-TZk80d-r8s4j-ORHb" aria-live="assertive" aria-atomic="true"></div></div><div class="HB1eCd-Vkfede-NBtyUd-PvRhvb-LwH6nd"></div></div><div jscontroller="X4BaPc" jsaction="rcuQ6b:WYd;o6xM5b:Pg9eo;HuL2Hd:mHeCvf;VMhF5:FFYy5e;sk3Qmb:HI1Mdd;JIbuQc:rSzFEd(z2EeY),aSaF6e(ilzYPe);"><div jscontroller="o1L5Wb" data-sitename="sppr-2022" data-search-scope="1" data-universe="1" jsmodel="fNFZH" jsaction="Pe9H6d:cZFEp;WMZaJ:VsGN3;hJluRd:UADL7b;zuqEgd:HI9w0;tr6QDd:Y8aXB;MxH79b:xDkBfb;JIbuQc:SPXMTb(uxAMZ),LjG1Ed(a6mxbb);" jsname="G0jgYd"><div jsname="gYwusb" class="p9b27"></div><div jscontroller="RrXLpc" jsname="XeeWQc" role="banner" jsaction="keydown:uiKYid(OH0EC);rcuQ6b:WYd;zuqEgd:ufqpf;JIbuQc:XfTnxb(lfEfFf),AlTiYc(GeGHKb),AlTiYc(m1xNUe),zZlNMe(pZn8Oc);YqO5N:ELcyfe;"><div jsname="bF1uUb" class="BuY5Fd" jsaction="click:xVuwSc;"></div><div jsname="MVsrn" class="TbNlJb "><div role="button" class="U26fgb mUbCce fKz7Od h3nfre M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="GeGHKb" aria-label="Back to site" aria-disabled="false" tabindex="0" data-tooltip="Back to site" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="V4YR2c" viewBox="0 0 24 24" focusable="false"><path d="M0 0h24v24H0z" fill="none"/><path d="M20 11H7.83l5.59-5.59L12 4l-8 8 8 8 1.41-1.41L7.83 13H20v-2z"/></svg></span></span></div><div class="E2UJ5" jsname="M6JdT"><div class="rFrNMe b7AJhc zKHdkd" jscontroller="pxq3x" jsaction="clickonly:KjsqPd; focus:Jt1EX; blur:fpfTEe; input:Lg5SV" jsshadow jsname="OH0EC" aria-expanded="true"><div class="aCsJod oJeWuf"><div class="aXBtI 
I0VJ4d Wic03c"><span jsslot class="A37UZe qgcB3c iHd5yb"><div role="button" class="U26fgb mUbCce fKz7Od i3PoXe M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="lfEfFf" aria-label="Search" aria-disabled="false" tabindex="0" data-tooltip="Search" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="vu8Pwe" viewBox="0 0 24 24" focusable="false"><path d="M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/><path d="M0 0h24v24H0z" fill="none"/></svg></span></span></div><div class="EmVfjc SKShhf" data-loadingmessage="Loading…" jscontroller="qAKInc" jsaction="animationend:kWijWc;dyRcpb:dyRcpb" jsname="aZ2wEe"><div class="Cg7hO" aria-live="assertive" jsname="vyyg5"></div><div jsname="Hxlbvc" class="xu46lf"><div class="ir3uv uWlRce co39ub"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div><div class="ir3uv GFoASc Cn087"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div><div class="ir3uv WpeOqd hfsr6b"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div><div class="ir3uv rHV3jf EjXFBf"><div 
class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div></div></div><div role="button" class="U26fgb mUbCce fKz7Od JyJRXe M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="m1xNUe" aria-label="Back to site" aria-disabled="false" tabindex="0" data-tooltip="Back to site" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="V4YR2c" viewBox="0 0 24 24" focusable="false"><path d="M0 0h24v24H0z" fill="none"/><path d="M20 11H7.83l5.59-5.59L12 4l-8 8 8 8 1.41-1.41L7.83 13H20v-2z"/></svg></span></span></div></span><div class="Xb9hP"><input type="search" class="whsOnd zHQkBf" jsname="YPqjbf" autocomplete="off" tabindex="0" aria-label="Search this site" value="" aria-disabled="false" autofocus role="combobox" data-initial-value=""/><div jsname="LwH6nd" class="ndJi5d snByac" aria-hidden="true">Search this site</div></div><span jsslot class="A37UZe sxyYjd MQL3Ob"><div role="button" class="U26fgb mUbCce fKz7Od Kk06A M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="pZn8Oc" aria-label="Clear search" aria-disabled="false" tabindex="0" data-tooltip="Clear search" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg 
class="fAUEUd" viewBox="0 0 24 24" focusable="false"><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"></path><path d="M0 0h24v24H0z" fill="none"></path></svg></span></span></div></span><div class="i9lrp mIZh1c"></div><div jsname="XmnwAc" class="OabDMe cXrdqd"></div></div></div><div class="LXRPh"><div jsname="ty6ygf" class="ovnfwe Is7Fhb"></div></div></div></div></div></div></div><div jsname="tiN4bf"><style nonce="sCzgJ16LSIMruEOJW4MZ9A">.rrJNTc{opacity: 0;}.bKy5e{pointer-events: none; position: absolute; top: 0;}</style><div class="bKy5e"><div class="rrJNTc" tabindex="-1"><div class="VfPpkd-dgl2Hf-ppHlrf-sM5MNb" data-is-touch-wrapper='true'><button class="VfPpkd-LgbsSe VfPpkd-LgbsSe-OWXEXe-dgl2Hf LjDxcd XhPA0b LQeN7 WsSUlf jz7fPb" jscontroller="soHxf" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc; touchcancel:JMtRjd; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;mlnRJb:fLiPzd;" data-idom-class="LjDxcd XhPA0b LQeN7 WsSUlf jz7fPb" jsname="z2EeY" tabindex="0"><div class="VfPpkd-Jh9lGc"></div><div class="VfPpkd-J1Ukfc-LhBDec"></div><div class="VfPpkd-RLmnJb"></div><span jsname="V67aGc" class="VfPpkd-vQzf8d">Skip to main content</span></button></div><div class="VfPpkd-dgl2Hf-ppHlrf-sM5MNb" data-is-touch-wrapper='true'><button class="VfPpkd-LgbsSe VfPpkd-LgbsSe-OWXEXe-dgl2Hf LjDxcd XhPA0b LQeN7 WsSUlf br90J" jscontroller="soHxf" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc; touchcancel:JMtRjd; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;mlnRJb:fLiPzd;" data-idom-class="LjDxcd XhPA0b LQeN7 WsSUlf br90J" jsname="ilzYPe" tabindex="0"><div class="VfPpkd-Jh9lGc"></div><div class="VfPpkd-J1Ukfc-LhBDec"></div><div class="VfPpkd-RLmnJb"></div><span jsname="V67aGc" class="VfPpkd-vQzf8d">Skip to 
navigation</span></button></div></div></div><div class="M63kCb N63NQ"></div><div class="QZ3zWd"><div class="fktJzd AKpWA fOU46b yMcSQd Ly6Unf G9Qloe XeSM4 XxIgdb" jsname="UzWXSb" data-uses-custom-theme="false" data-legacy-theme-name="QualityBasics" data-legacy-theme-font-kit="Light" data-legacy-theme-color-kit="Blue" jscontroller="Md9ENb" jsaction="gsiSmd:Ffcznf;yj5fUd:cpPetb;HNXL3:q0Vyke;e2SXKd:IPDu5e;BdXpgd:nhk7K;rcuQ6b:WYd;"><header id="atIdViewHeader"><div class="BbxBP HP6J1d K5Zlne" jsname="WA9qLc" jscontroller="RQOkef" jsaction="rcuQ6b:JdcaS;MxH79b:JdcaS;VbOlFf:ywL4Jf;FaOgy:ywL4Jf; keydown:Hq2uPe; wheel:Ut4Ahc;" data-top-navigation="true" data-is-preview="false"><div class="DXsoRd YTv4We oNsfjf" role="button" tabindex="0" jsaction="click:LUvzV" jsname="z4Tpl" id="s9iPrd" aria-haspopup="true" aria-controls="yuynLe" aria-expanded="false"><svg class="wFCWne" viewBox="0 0 24 24" stroke="currentColor" jsname="B1n9ub" focusable="false"><g transform="translate(12,12)"><path class="hlJH0" d="M-9 -5 L9 -5" fill="none" stroke-width="2"/><path class="HBu6N" d="M-9 0 L9 0" fill="none" stroke-width="2"/><path class="cLAGQe" d="M-9 5 L9 5" fill="none" stroke-width="2"/></g></svg></div><nav class="JzO0Vc" jsname="ihoMLd" role="navigation" tabindex="-1" id="yuynLe" jsaction="transitionend:UD2r5"><a class="XMyrgf" href="/view/sppr-2022/home"><img src="https://lh6.googleusercontent.com/lWnc0LyRNC7ppGvzhtZyNToMWHay6_vSvgabGJZ0Vv06P0zUd1KNRjknhvXuXSzZIqStswLFknAJcU5vQbbqbM0=w16383" class="r9CsCb" role="img" aria-label="Site home"></a><a class="Zjiec oNsfjf" href="/view/sppr-2022/home"><span>sppr 2022</span></a><ul class="jYxBte Fpy8Db" tabindex="-1"><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/home" data-url="/view/sppr-2022/home" 
data-type="1" data-level="1">Home</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/paper-submission" data-url="/view/sppr-2022/paper-submission" data-type="1" data-level="1">Paper Submission</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/program-committee" data-url="/view/sppr-2022/program-committee" data-type="1" data-level="1">Program Committee</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf lhZOrc" aria-current="true"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-selected="true" href="/view/sppr-2022/accepted-papers" data-url="/view/sppr-2022/accepted-papers" data-type="1" data-level="1">Accepted Papers</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/contact-us" data-url="/view/sppr-2022/contact-us" data-type="1" data-level="1">Contact Us</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" 
data-navtype="1" href="/view/sppr-2022/venue" data-url="/view/sppr-2022/venue" data-type="1" data-level="1">Venue</a></div></div></li></ul></nav><div class="VLoccc K5Zlne QDWEj U8eYrb" jsname="rtFGi"><div class="Pvc6xe"><div jsname="I8J07e" class="TlfmSc YSH9J"><a class="GAuSPc" jsname="jIujaf" href="/view/sppr-2022/home"><img src="https://lh6.googleusercontent.com/lWnc0LyRNC7ppGvzhtZyNToMWHay6_vSvgabGJZ0Vv06P0zUd1KNRjknhvXuXSzZIqStswLFknAJcU5vQbbqbM0=w16383" class="lzy1Td" role="img" aria-label="Site home" jsname="SwcDWb"><span class="QTKDff p46B7e">sppr 2022</span></a></div><nav class="plFg0c" jscontroller="HXO1uc" jsaction="rcuQ6b:rcuQ6b;MxH79b:CfS0pe;" id="WDxLfe" data-is-preview="false" style="visibility: hidden;" role="navigation" tabindex="-1"><ul jsname="waIgnc" class="K1Ci7d oXBWEc jYxBte"><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/home" data-url="/view/sppr-2022/home" data-type="1" data-level="1">Home</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/paper-submission" data-url="/view/sppr-2022/paper-submission" data-type="1" data-level="1">Paper Submission</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" 
jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/program-committee" data-url="/view/sppr-2022/program-committee" data-type="1" data-level="1">Program Committee</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J M9vuGd" aria-current="true"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-selected="true" href="/view/sppr-2022/accepted-papers" data-url="/view/sppr-2022/accepted-papers" data-type="1" data-level="1">Accepted Papers</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/contact-us" data-url="/view/sppr-2022/contact-us" data-type="1" data-level="1">Contact Us</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/venue" data-url="/view/sppr-2022/venue" data-type="1" data-level="1">Venue</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf ZmrVpf oXBWEc" more-menu-item jsaction="mouseenter:Vx8Jlb; mouseleave:ysDRUd"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" 
jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-expanded="false" aria-haspopup="true" data-level="1">More</a><div class="mBHtvb u5fiyc" role="presentation" title="Expand/Collapse" jsaction="click:oESVTe" jsname="ix0Hvc"><svg class="dvmRw" viewBox="0 0 24 24" stroke="currentColor" jsname="HIH2V" focusable="false"><g transform="translate(9.7,12) rotate(45)"><path class="K4B8Y" d="M-4.2 0 L4.2 0" stroke-width="2"/></g><g transform="translate(14.3,12) rotate(-45)"><path class="MrYMx" d="M-4.2 0 L4.2 0" stroke-width="2"/></g></svg></div></div></div><div class="oGuwee eWDljc RPRy1e Mkt3Tc" style="display:none;" jsname="QXE97" jsaction="transitionend:SJBdh" role="group"><ul class="VcS63b"><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/home" data-url="/view/sppr-2022/home" data-type="1" data-in-more-submenu="true" data-level="2">Home</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/paper-submission" data-url="/view/sppr-2022/paper-submission" data-type="1" data-in-more-submenu="true" data-level="2">Paper Submission</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" 
href="/view/sppr-2022/program-committee" data-url="/view/sppr-2022/program-committee" data-type="1" data-in-more-submenu="true" data-level="2">Program Committee</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf lhZOrc" aria-current="true"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-selected="true" href="/view/sppr-2022/accepted-papers" data-url="/view/sppr-2022/accepted-papers" data-type="1" data-in-more-submenu="true" data-level="2">Accepted Papers</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/contact-us" data-url="/view/sppr-2022/contact-us" data-type="1" data-in-more-submenu="true" data-level="2">Contact Us</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/sppr-2022/venue" data-url="/view/sppr-2022/venue" data-type="1" data-in-more-submenu="true" data-level="2">Venue</a></div></div></li></ul></div></li></ul></nav><div jscontroller="gK4msf" class="RBEWZc" jsname="h04Zod" jsaction="rcuQ6b:WYd;JIbuQc:AT95Ub;VbOlFf:HgE5D;FaOgy:HgE5D;MxH79b:JdcaS;" data-side-navigation="false"><div role="button" class="U26fgb mUbCce fKz7Od iWs3gf Wdnjke M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; 
mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="R9oOZd" aria-label="Open search bar" aria-disabled="false" tabindex="0" data-tooltip="Open search bar" aria-expanded="false" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="vu8Pwe tCHXDc YSH9J" viewBox="0 0 24 24" focusable="false"><path d="M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/><path d="M0 0h24v24H0z" fill="none"/></svg></span></span></div></div></div><div jsname="mADGA" class="zDUgLc"></div></div><div class="TxnWlb" jsname="BDdyze" jsaction="click:LUvzV"></div></div></header><div role="main" tabindex="-1" class="UtePc RCETm" dir="ltr"><section id="h.33e574b64c58fbdc_11" class="yaqOZd LB7kq cJgDec nyKByd O13XJf KEFykf" style=""><div class="Nu95r"><div class="IFuOkc" style="background-size: cover; background-position: center center; background-image: url(https://lh5.googleusercontent.com/ojgjvcj3KLOBHR-XNDFxvmn_RE7y1kvhxrmBfL5yARcVCs7R1CX7KsAWsg-sWEYFKAqgDMxbOz4sp3tZu8kBi2c=w16383);" jsname="LQX2Vd"></div></div><div class="mYVXT"><div class="LS81yb VICjCf j5pSsc db35Fc" tabindex="-1"><div class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd purZT-AhqUyc-II5mzb ZcASvf-AhqUyc-II5mzb pSzOP-AhqUyc-qWD73c Ktthjf-AhqUyc-qWD73c JNdkSc SQVYQc"><div class="JNdkSc-SmKAyb LkDMRd"><div class="" jscontroller="sGwD4d" jsaction="zXBUYb:zTPCnb;zQF9Uc:Qxe3nd;" jsname="F57UId"><div class="oKdM2c ZZyype Kzv0Me"><div id="h.33e574b64c58fbdc_14" class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd jXK9ad D2fZ2 zu5uec OjCsFc dmUFtb wHaque g5GTcb JYTMs"><div 
class="jXK9ad-SmKAyb"><div class="tyJCtd mGzaTb Depvyb baZpAe lkHyyc"><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 10pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: center;"><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 14pt; font-variant: normal; vertical-align: baseline;"><strong>11</strong></span><sup style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 8.4pt; font-variant: normal;"><strong>th </strong></sup><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 14pt; font-variant: normal; vertical-align: baseline;"><strong>International Conference on Signal, Image Processing and Pattern Recognition (SPPR 2022)</strong></span></p><p id="h.tix6mlexnij0" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: center;"><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 14pt; font-variant: normal; vertical-align: baseline;"><strong>November 26 ~ 27, 2022, London, United Kingdom</strong></span></p></div></div></div></div></div></div></div></div></div><div class="DnLU4" jsaction="JIbuQc:v5IJLd(ipHvib);"><div role="button" class="U26fgb mUbCce fKz7Od HqAAld Wew9ke M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="ipHvib" aria-label="Scroll down" aria-disabled="false" tabindex="0"><div class="VTBa7b MbhUzd" 
jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="XE8yyf" viewBox="0 0 24 24" focusable="false"><path d="M7.41 7.84L12 12.42l4.59-4.58L18 9.25l-6 6-6-6z"/><path d="M0-.75h24v24H0z" fill="none"/></svg></span></span></div></div></section><section id="h.33e574b64c58fbdc_36" class="yaqOZd" style=""><div class="IFuOkc"></div><div class="mYVXT"><div class="LS81yb VICjCf j5pSsc db35Fc" tabindex="-1"><div class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd purZT-AhqUyc-II5mzb ZcASvf-AhqUyc-II5mzb pSzOP-AhqUyc-qWD73c Ktthjf-AhqUyc-qWD73c JNdkSc SQVYQc"><div class="JNdkSc-SmKAyb LkDMRd"><div class="" jscontroller="sGwD4d" jsaction="zXBUYb:zTPCnb;zQF9Uc:Qxe3nd;" jsname="F57UId"><div class="oKdM2c ZZyype Kzv0Me"><div id="h.33e574b64c58fbdc_33" class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd jXK9ad D2fZ2 zu5uec OjCsFc dmUFtb wHaque g5GTcb JYTMs"><div class="jXK9ad-SmKAyb"><div class="tyJCtd mGzaTb Depvyb baZpAe"><div id="h.h71l4cl5ypjb" class="GV3q8e aP9Z7e"></div><h3 id="h.h71l4cl5ypjb_l" dir="ltr" class="CDt4Ke zfr3Q OmQG5e" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 4pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: center;" tabindex="-1"><div jscontroller="Ae65rd" jsaction="touchstart:UrsOsc; click:KjsqPd; focusout:QZoaZ; mouseover:y0pDld; mouseout:dq0hvd;fv1Rjc:jbFSOd;CrfLRd:SzACGe;" class="CjVfdc"><div class="PPhIP rviiZ" jsname="haAclf"><div role="presentation" class="U26fgb mUbCce fKz7Od LRAOtb Znu9nd M9Bg4d" jscontroller="mxS5xe" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;" jsshadow aria-describedby="h.h71l4cl5ypjb_l" aria-label="Copy heading link" aria-disabled="false" data-tooltip="Copy heading link" aria-hidden="true" data-tooltip-position="top" 
data-tooltip-vertical-offset="12" data-tooltip-horizontal-offset="0"><a class="FKF6mc TpQm9d" href="#h.h71l4cl5ypjb" aria-label="Copy heading link" jsname="hiK3ld" role="button" aria-describedby="h.h71l4cl5ypjb_l"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -11px"><svg class="OUGEr QdAdhf" width="22px" height="22px" viewBox="0 0 24 24" fill="currentColor" focusable="false"><path d="M0 0h24v24H0z" fill="none"/><path d="M3.9 12c0-1.71 1.39-3.1 3.1-3.1h4V7H7c-2.76 0-5 2.24-5 5s2.24 5 5 5h4v-1.9H7c-1.71 0-3.1-1.39-3.1-3.1zM8 13h8v-2H8v2zm9-6h-4v1.9h4c1.71 0 3.1 1.39 3.1 3.1s-1.39 3.1-3.1 3.1h-4V17h4c2.76 0 5-2.24 5-5s-2.24-5-5-5z"/></svg></span></span></a></div></div><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 14pt; vertical-align: baseline;"><strong>Accepted Papers</strong></span></div></h3><p dir="ltr" class="CDt4Ke zfr3Q"><br></p><p id="h.7of0mg9ku1u7" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Depression Detection using Machine and Deep Learning Models to Assess Mental Health of Social Media Users</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; 
margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Smita Ghosh</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Sneha Ghosh</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Diptaraj Sen</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Pramita Das</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Mathematics and Computer Science, Santa Clara University, California, USA, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; 
font-weight: normal; vertical-align: baseline;">Department of Computer Science and Engineering, University of Engineering and Management, Kolkata, India, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Electronics and Communication Engineering, National Institute of Technology Durgapur, Durgapur, India</span></p><p id="h.9rpw5kdm15cs" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">During the COVID-19 pandemic millions of people were affected due to quarantine and restrictions. With more than half of the world's population active on social media, people resorted to these platforms as their outlet for emotions. This led to researchers analysing content on social media to detect depression by studying the patterns of content posting. This paper focuses on finding a data-driven metric called ‘Happiness Factor’ of a user to assess their mental health. 
Various models were trained to classify a post as ‘depressed’. A user’s ‘Happiness Factor’ was calculated based on the nature of their posts. This metric identifies degrees of depression of a user. The results show the effectiveness of the classifier in identifying the depression level. Also, a Mental Health Awareness Resource System is proposed which recommends mental health awareness resources to users on their social media interface based on their ‘Happiness Factor’.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #343a40; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"></span></p><p id="h.s61brnj13u5e" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Depression Detection, Machine Learning, Deep Learning, Universal Sentence Encoder, Social Media.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" 
style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.xugpsdc23a7c" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Query Optimization meets Reinforcement Learning</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Enamul Haque, David R. 
Cheriton School of Computer Science, University of Waterloo, Ontario, Canada</span></p><p id="h.ynpb34thyh4s" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Query optimization is one of the most important tasks of relational database management systems that, if improved properly, can help achieve higher performance in terms of time and resources. Configuration tuning of diverse database instances in distributed systems and optimization of query workloads for cloud databases are also important performance indicators to meet demands of the growing user base globally. But these problems are already NP-Hard, and solutions depend mostly on heuristics-based approximation or randomized algorithms. 
Here, in this work we explore how advances in Reinforcement Learning (RL) are contributing to this branch of computer systems research.</span></p><p id="h.700bhfxvoaje" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Machine Learning, Database, Query Optimization, Reinforcement Learning, Deep Learning.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.jkjtrrqpbxmb" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Prediction of Genetic Disorders using Machine Learning</strong></span></p><p 
dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Sadichchha Naik</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Amisha Panchal</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Disha Nevare</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Dr. 
Chhaya Pawar</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Student, Department of Computer Engineering, Datta Meghe College of Engineering, Navi Mumbai, India, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Asst. Professor, Department of Computer Engineering, Datta Meghe College of Engineering, Navi Mumbai, India</span></p><p id="h.8ffubxj99sfv" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">A 
genetic disorder is a health condition that is usually caused by mutations in DNA or changes in the number or overall structure of chromosomes. Several types of commonly-known diseases are related to hereditary gene mutations. Genetic testing aids patients in making important decisions in the prevention, treatment, or early detection of hereditary disorders. With increasing population, studies have shown that there has been an exponential increase in the number of genetic disorders. Genetic disorders impact not only the physical health, but also the psychological and social well-being of patients and their families. Genetic disorders have powerful effects on families. Like many chronic conditions, they may require continual attention and lack cures or treatments. Low awareness of the importance of genetic testing contributes to the increase in the incidence of hereditary disorders. Many children succumb to these disorders and it is extremely important that genetic testing be done during pregnancy. In that direction, the project aims to predict Genetic Disorder and Disorder Subclass using a Machine Learning Model trained from a medical dataset. 
The model being derived out of a predictor and two classifiers, shall predict the presence of genetic disorder and further specify the disorder and disorder subclass, if present.</span></p><p id="h.yg7rsixv58ev" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Genetic disorder, Machine Learning, Medical dataset.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.xdej0gblp4w4" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Mining Movement Patterns to Separate Rugby Super League Players 
into Groups</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Victor Elijah Adeyemo</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1,2,3,4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> Anna Palczewska</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Ben Jones</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2,3,4,5,6</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> Dan Weaving</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2,4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 
'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">School of Built Environment, Engineering and Computing, Leeds Beckett University, Leeds, UK, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Carnegie Applied Rugby Research (CARR) Centre, Carnegie School of Sport, Leeds Beckett University, Leeds, UK, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">England Performance Unit, Rugby Football League, Leeds, UK, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Leeds Rhinos Rugby League Club, Leeds, UK, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">5</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">School of Science and Technology, University of New England, Armadale, Australia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">6</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Division of Exercise Science and Sports 
Medicine, Department of Human Biology, Rica, Cape Town, South Africa</span></p><p id="h.ukp3k3p6l5k1" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The application of pattern mining algorithms to extract movement/event patterns from sports big data enables the extraction of on-field activities, provides context to such activities and enhances training specificity. Currently, aggregated physical performance indicators (e.g., total distance covered) are used to separate players into positional groups but these indicators do not capture the sequential nature of match activities. As there are various types of pattern mining algorithms, this study aims to identify which one discovers the best set of movement patterns (on-field activities) to separate players into two playing positions and utilize classification algorithms to find the most accurate separation. Three pattern mining frameworks were implemented to extract movement patterns and five machine learning classification algorithms to separate groups of players via a case study (i.e., two Elite Rugby League players playing positions). 
The pattern mining frameworks are Sequential Movement Pattern-mining (SMP), l-length Closed Contiguous (LCC) and Apriori Closed (APR). Five classifiers were fitted on tabular datasets (whose independent variables are set of movement patterns) to determine which type of movement patterns accurately separates the groups. Extracted “LCC” and “SMP” on-field activities shared a 0.179 Jaccard similarity score (18%) as both are consecutive patterns. Extracted “APR” on-field activities shared no significant similarity with both extracted “LCC” and “SMP” on-field activities because it mined non-consecutive patterns. Multi-layered Perceptron algorithm fitted on the dataset whose independent variables were the extracted “LCC” on-field activities achieved the highest accuracy of 91.02% ± 0.02 and precision, recall and F1 scores of 0.91. Therefore, we recommend the extraction of closed contiguous (consecutive) over non-consecutive patterns for separating groups of players.</span></p><p id="h.fgnkmybefly" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Pattern Mining, Performance Analysis, Rugby League, Sports Analytics, 
Machine Learning.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.j24au0be24l5" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>An AI based solution to reduce food loss by predicting the expiry date of Perishable Food</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Trewon Weerasooriya and Kishore Kumar, Faculty of Computing, Sri Lanka Institute of Information Technology Malabe, Sri Lanka</span></p><p id="h.stfwvcmolr30" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 
0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">“Food Loss and Waste” is a growing issue for the ecology, economy, and social aspects of the world. The social, environmental, and economic factors are all negatively impacted by it. Food loss and waste are mostly caused by several factors, including poor planning, excessive production, and customer perception. This study focuses on the hospitality and food services industry’s two primary sub-sectors, specifically Sri Lanka’s hotels and restaurants. It develops a solution to eliminate the causes highlighted through the use of technologies like artificial intelligence and cloud services which eliminates or reduces food loss and waste that occurs during the processes of the food supply chain. Primarily this paper is based on a machine learning based system which focuses on prediction of expiry dates of perishable food and generating donation recommendations based on the predicted results. While implementing a platform to bridge the gap between food donors and food donation recipients. The collected datasets with the use of two machine learning models, revealed that the expiry date of perishable food items can be predicted with a mean squared error of 0.7651. 
This means that the model predicts expiry dates with a standard error of less than a day.</span></p><p id="h.vf2w1ebqk29i" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Machine Learning, Artificial intelligence, Random Forest, XGBoost.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.3bpacdn9iy69" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>A Comprehensive Study on Machine Learning Methods to Increase the Prediction Accuracy of Classifiers and Reduce the Number of Medical 
Tests Required to Diagnose Alzheimer’s Disease</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Md. Sharifur Rahman and Girijesh Prasad, School of Computing, Engineering and Intelligent Systems, Ulster University, Northern Ireland, UK</span></p><p id="h.vvdkrtqbkazc" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Alzheimer’s patients gradually lose their ability to think, behave, and interact with others. 
Medical history, laboratory tests, daily activities, and personality changes can all be used to diagnose the disorder. A series of time-consuming and expensive tests are used to diagnose the illness. The most effective way to identify Alzheimer’s disease is using a Random-forest classifier in this study, along with various other Machine Learning techniques. The main goal of this study is to fine-tune the classifier to detect illness with fewer tests while maintaining a reasonable disease discovery accuracy. We successfully identified the condition in almost 94% of cases using four of the thirty frequently utilized indicators.</span></p><p id="h.n9oqmb941fea" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Machine Learning, Accuracy, Precision, Recall, Classifier, Random Forest, Alzheimer’s.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.wothypxju04x" dir="ltr" 
class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>SERP Evaluation and Model Interpretation Based on Behavior Sequence</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Xiaoyu Dong and Shen Shen and Jinkang Jia and Yifan Wang, Baidu Inc, Beijing, China</span></p><p id="h.4cyywpvapwtf" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; 
margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">SERP (Search Engine Result Page) quality evaluation plays a vital role in industrial practice. With the rapid iterations of search engine, traditional page-level metrics like click ratio, dwell time can no longer evaluate user experience on various templates of results. To promote evaluation accuracy, we implement Transformer to capture the sequential patterns from behavior sequence data. In recent studies, approaches focusing on modeling behavior sequences have emerged. Some studies concentrate on feature engineering by extracting subsequence patterns, others focus on end-to-end deep learning models. While widely used, these two methods both have drawbacks, either a risk of distortion of true subsequence patterns or difficulty for interpretation. Here we implement Transformer to give considerations to both completeness of sequential patterns and model interpretation. To find the best way of modeling behavior sequence data with continuous features, we adopt two embedding methods to predict SERP quality evaluation, and the second one achieves good promotion. 
What’s more, we develop a novel interpretation method for transformer models and demonstrate its ability to make interpretations for subsequence patterns.</span></p><p id="h.wi24a05somdo" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Behavior sequence, Transformer, Model Interpretation.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.z96n7l4mmmx0" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Web Application and Internet Security Threats</strong></span></p><p dir="ltr" 
class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Vaibhav Katiyar, University Institute of engineering and technology, CSJMU, Kanpur</span></p><p id="h.tf1ik75r9mlk" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Computer and network security are one of the most challenging topics in the Information Technology research community. Internet security is a significant subject that may affect a wide range of Internet users. 
People who use the Internet to sell, buy and even to communicate need their communications to be safe and secure. This paper discusses the different aspects of Internet and networking security and their weaknesses. The main elements of networking security techniques, such as firewalls, passwords, encryption, authentication and integrity, are also discussed in this paper. The anatomy of a web application attack and the attack techniques are also covered in detail. The growth of high-speed Internet use has strained the limits of existing network security measures. Therefore, other security defense techniques related to securing high-speed Internet and computer security in the real world are studied as well, such as DNS, One-Time Password and defending the network as a whole. This paper also surveys the worm epidemics in high-speed networks and their unprecedented spread rates.</span></p><p id="h.sj0ohdvvret5" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Web application Security, Network Security, Protection tools, SQL Injection, Firewall, and Intrusion Detection System.</span></p><p 
dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.khd56ve8lunj" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Cloud Oriented Virtual Fragmented Database</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Ahmad Shukri Mohd Noor, Nur F. Mat Zain, Rabiei Mamat and Noor hafhizah A. 
Rahim, Faculty of Ocean Engineering Technology and Informatics, Universiti Malaysia Terengganu</span></p><p id="h.4bl1p2k2qzxq" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Fragmentation is a way to replicate the data into multiple servers to increase the availability and accessibility. The existing techniques Hierarchical Replication Scheme (HRS), Read-One-Write-All (ROWA) and Branch Replication Scheme (BRS) are the three most common techniques used to replicate the data, but they have their own drawbacks, such as high communication cost. The technique proposed in this research is the Master-slave replica technique, which is used to distribute the data into servers, so it enables the user to access it from any server in case one server fails. This research is about creating a cloud environment and replicating the data into virtual servers to enable the user to access the data at any location and at any time. VirtualBox is used in this research to create Windows guests on the hard disk of the local host; these Windows guests are used as virtual servers and store the data of the master replica server. 
By using three virtual servers, a series of experiments will be performed, and the results will be compared to the existing techniques; the experimental findings will then demonstrate the replication consistency of the database.</span></p><p id="h.h8i2fj492454" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Virtual Machine, Database fragmentation, Database Replication, Master-slave replica, Data Availability.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.5h98558dredq" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; 
vertical-align: baseline;"><strong>Artificial Intelligence to Analyze Mineral Resource Cost Estimation using Ordinary Kriging and Schlumberger Methods</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Paryati</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Krit Salahddine</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Tehchnic Engineering, UPN “Veteran” Yogyakarta, Indonesia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 
'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Ibn Zohr University, Agadir City, Morocco</span></p><p id="h.qbzjq2mue94a" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Artificial intelligence to analyze the estimated cost of mineral resources, mostly done in mining areas, to estimate the mining area which is an element of mineral reserves in nature. So to determine the commodity and the estimated cost for the mining area, a study of andesite estimation and a study of technical factors that influence the development of mining processes and estimates at PT. Varia Usaha Beton Mandalika, East Lombok Regency, West Nusa Tenggara, Indonesia. Thus, with an exploratory study, a description of the condition of andesite mineral commodities will be obtained at the location of the area. One approach used to estimate andesite is to use the Schlumberger method and use ordinary kriging so that it can simplify and speed up the work process, especially when carrying out andesite activities in a very large mining area. Research conducted in the mining area owned by PT. 
Varia Usaha Beton Mandalika, East Lombok Regency, West Nusa Tenggara, Indonesia. Mining companies use the Schlumberger configuration method and ordinary kriging. The results obtained by andesite mineral resource estimates are very good and relevant and valid. This mining method is used in the production operation process of andesite resource estimation. The selection model is based on the type of commodity and the contours of the mining area in the form of sand and stone. This method has the ability to detect the presence of inhomogeneous rock layers by comparing the apparent resistivity value with changes in the MN/2 electrode distance and is able to detect the presence of inhomogeneous rock layers. So this method is very suitable for estimating andesite resources in mining areas.</span></p><p id="h.ie05gh7g0wp0" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Artificial Intelligence, Estimation, Schlumberger, Vertical Sounding, Ordinary Kriging.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; 
line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.223qmbi020ix" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Agent-based Modeling and Simulation of Complex Industrial Systems: Case Study of the Dehydration Process</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Noureddine Seddari</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1,2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Sohaib Hamioud</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; 
vertical-align: baseline;">, Abdelghani Bouras</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Sara Kerraoui</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Nesrine Menai</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">LICUS Laboratory, Department of Computer Science, Université 20 Août 1955-Skikda, Skikda 21000, Algeria, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">LIRE Laboratory, Abdelhamid Mehri-Constantine 2 University, Constantine 25000,Algeria, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">LISCO Laboratory, Computer Science Department, Badji-Mokhtar University,Annaba 23000, Algeria, 
</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Industrial Engineering, College of Engineering, AlfaisalUniversity,Riyadh 11533, Saudi Arabia</span></p><p id="h.cr92i0alks25" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Agent-based modeling and simulation (ABMS) is a new approach to modeling autonomous systems and interacting agents. This method is becoming more and more popular for its efficiency and simplicity. It constitutes an approach in the field of modeling complex systems. Indeed, ABMS offers, contrary to other types of simulations, the possibility of directly representing the simulated entities, their behaviors, and their interactions without having to recourse to mathematical equations. This work is a contribution in this sense, the goal is to propose an agent-based model to simulate an industrial system. 
The latter presents the problem of complexity, which can be mastered with the Multi-Agent Systems (MAS) approach. Our model is validated by a case study of the natural gas dehydration process. The latter is consolidated by a simulation made in the multi-agent platform JADE (Java Agent DEvelopment Framework).</span></p><p id="h.bxhqbirpyxxf" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Agent-based modeling and simulation (ABMS), Industrial system, Multi-Agent Systems (MAS), Multi-agent platform JADE.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.1liaatknxw7" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: 
normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Data Augmentation Using GANs</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Dr Simant Prakoonwit</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Emilija Strelcenia</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Creative Technology, Bournemouth University, Bournemouth, England, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span 
style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Creative Technology, Bournemouth University, Bournemouth, England</span></p><p id="h.gtwr4a2x7tu3" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">GANs have been found to be very successful in a number of applications, most notably image generation. However, there are still a number of challenges that need to be addressed in order to make GANs more widely applicable. 
In this literature review, we shall discuss various loss functions and variants of the GANs and their uses in the field of machine and deep learning that incorporates these improvements and can reliably be utilized for machine learning applications.</span></p><p id="h.5b5jsmserx3v" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">GANs, synthetic data, class imbalance, loss functions.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.vhh35gpeaerj" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: 
baseline;"><strong>Towards Tuberculosis Incidence Time Series Forecasting in Colombian Regions: The Antioquia Case</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Ivan Camilo Sanchez-Vega</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Alvaro David Orjuela-Cañon</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Carlos Enrique Awad Garcia</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Erika Vergara</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; 
font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Maria Angelica Palencia</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">5</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Andres L. Jutinico</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">6</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Mechanical, Electronics and Biomedical Engineering Faculty, Universidad Antonio Nariño, Bogota D.C., Colombia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">School of Medicine and Health Sciences, Universidad del Rosario, Bogota D.C., Colombia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Subred Integrada de Servicios de Salud Centro Oriente, Bogota D.C., Colombia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; 
vertical-align: baseline;">Universidad Antonio Nariño, Bogota D.C., Colombia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">5</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Subred Integrada de Servicios de Salud Centro Oriente, Bogota D.C., Colombia, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">6</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Mechanical, Electronics and Biomedical Engineering Faculty, Universidad Antonio Nariño, Bogota D.C., Colombia</span></p><p id="h.l43zsh5yasvd" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Antioquia is a Colombian department where 6.7 million people live. Currently, it is the region of the country with the newest cases of tuberculosis reported in 2021, about 18.8%. 
In addition, the incidence rate of tuberculosis was 36.8 per 100,000 inhabitants. Public government health policy regarding tuberculosis should aim to prevent the uninfected community, in addition to detecting and treating people with tuberculosis. In this sense, the study of algorithms to predict the epidemic trend should be promoted. This work addresses the prediction of tuberculosis cases in Antioquia, considering data from the health surveillance system between 2007 and 2021. For the prediction, the Kalman filter and the autoregressive model are considered. The results show a better performance using the Kalman filter for the prediction of tuberculosis cases at six weeks.</span></p><p id="h.2asxi6xrzek8" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Tuberculosis, Forecasting, Autoregressive Model, Kalman Filter, Performance.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 
0;"><br></p><p id="h.ugd4hwptl9gh" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>CAVSS: A Commonsense Augmented Variational Sequence To Sequence Model For Language Generation</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Mingxuan Liang</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Luchen Zhang</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Junwu Zhu</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 
7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Yang Li</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Xiang Gao</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> College of Information Engineering, Yangzhou University, Yangzhou Jiangsu, China, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> National Computer Network Emergency Response Technical Team/Coordination Center of China</span></p><p id="h.kogjpklb6a68" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke 
zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Commonsense knowledge as external knowledge enhances the semantic understanding of the input sequences of the model and is of guidance to text generation models. In this paper, we propose a novel approach of incorporating commonsense knowledge for enhancing the performance of end-to-end text generation models. Firstly, given an input sequence and retrieving the relevant knowledge triples, the embedding of the commonsense knowledge and the context vector encoded in the encoder part are spliced for sampling. Then an autoregressive transformation is applied to the sampling to prevent the problem of too slow fitting of simple Gaussian distribution, and a new learning objective is designed in the training phase to make this transformed distribution fit the posterior distribution. 
In addition, we perform variational operations on the decoding part of the attention mechanism to weaken the attention strength and prevent reconstruction from playing a decisive role in generation while ignoring other modules.</span></p><p id="h.5abok62dp992" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Deep Learning, Commonsense Knowledge, Auto-Encoder, Generation.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.6xq9fd29qetm" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Solving 
Security Problem in Access Control Systems</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Anh Tuan Truong, Ho Chi Minh City University of Technology, VNU-HCM, Ho Chi Minh City, Vietnam</span></p><p id="h.xxttre31e896" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Access control model is one of the most widespread frameworks for controlling the access in an organization. 
Many extended features have been proposed to make the model increasingly well adapted to real world systems (e.g., the administration of rules in access control system). Although these extensions bring advantages to the system, the interaction between them may create a conflict in it (i.e., the security problem). In this paper, we discuss this problem as well as the automated security analysis solutions that have been proposed to solve the problem. We also propose an approach that uses the capability of a model checker to automatically analyse access control policies. An extensive experimentation shows that the proposed approach outperforms a state-of-the-art analysis technique.</span></p><p id="h.25enrmxdrjvb" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">CCS CONCEPTS, Security and Trust, Access Control System, Security Analysis.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 
0;"><br></p><p id="h.vypjqd754fq2" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Performance based Comparative Analysis of Naive Bayes Variations for Text Classification</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Ali Abbas, Manish Jaiswal, Prajna Jha, Shreya Agarwal and Tanveer J. 
Siddiqui, Department of Electronics and Communication, University of Allahabad, Prayagraj, India.</span></p><p id="h.6jof5ywmoit1" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">With the high consumption of digital data, the problem of unorganized textual data created major challenges in today’s scenario. To overcome this issue of unorganized textual information we perform document classification which is divided into four major phases i.e., pre-processing, feature selection, model training, and model testing. In this paper, we have selected four types of feature vectors, three with weighting techniques and one without weighting. Performance has been tested on these four feature vectors with five variances of naïve bayes classifiers out of which two were not able to perform training due to the sparseness in the dataset and the rest three performed well. 
In the reported result of the experiment, the F1-macro score and the accuracy of Complement Naïve Bayes using the term frequency weighting scheme are 0.807901362 and 0.821163038, which outperform all the other feature sets with all the variants of naïve Bayes classifiers. In terms of time consumption for training and testing, again the performance of Complement Naïve Bayes using the term frequency weighting scheme was found to be the best.</span></p><p id="h.8zpol4g2pyxp" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Multinomial Naïve Bayes, Complement Naïve Bayes, Min-max scaling, tf-idf, machine learning, document classification.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.mr5ui3q094ma" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; 
padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Generic Question Classification for Dialogue Systems</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Marine Troadec, Matthis Houlès, and Philippe Blache, LPL-CNRS, ILCB, Aix-en-Provence, France</span></p><p id="h.flxkk32j3pc8" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times 
New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">We present in this paper a new classification approach for identifying questions in dialogue systems. The difficulty in this task is first to be domain-independent, reusable whatever the dialogue application and second to be capable of a real time processing, in order to fit with the needs of reactivity in dialogue systems. The task is then different than that of question classification usually addressed in question answering systems. We propose in this paper a hierarchical classifier in two steps, filtering first question/no-question utterances and second the type of the question. Our method reaches a f-score of 98% for the first step and 97% for the second one, representing the state of the art for this task.</span></p><p id="h.93m5nj9f6x8k" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Question classification, Dialogue systems, Hierarchical classification.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; 
line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.3xo202y7e8an" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Global Language Positioning System</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Xiaohui Zou</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Shunpeng Zou</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 
'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Searle Research Center, No. 100, Renshan Road, Hengqin, Guangdong 519000, China, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Peng Ge Learning Center,Csanady utca 4/b, 1132 , Budapest, Hungary</span></p><p id="h.749fj9dhf0xr" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The aim is to explore a new method of Global Language Positioning System, that is, natural language processing by using GPS sequencing positioning. 
The method is: first, it is pointed out that Saussure distinguishes language and speech although it is good for Indo-European language, but it is not enough for Chinese; then, in the process of Chinese information processing that can be divided into Yan and Yu as the formal language from Tarski and Carnap, as well as the way of combining a single element set of pure mathematics to combine the metal group set at all levels to build a broad-language formal information processing model that combines Chinese and arithmetic; finally, compare the broad-language bilingual treatment described herein, translated by Google Machine Translate as typical representatives, narrow bilingual processing, such as English-Chinese or Chinese-English machine translation, distinguish between complete matching and partial matching, and completely not matching as three types, and then through human-computer interaction and collaboration methods, the reasons why the centralized inspection is not matched, which provides a reliable basis for the improvement or optimization of the five links of classification, matching, translation, prediction and decision-making. As a result, it not only improved the quality of machine translation significantly, but also provided brand-new natural language processing methods for further data mining, semantic identification, information extraction, knowledge processing and software modeling, and the improvement of NLP &amp; AI technology quality, by using handling paradigm, namely: Global Language Positioning System. 
Its significance is: this new paradigm is not only practically efficient and precise, especially Chinese information processing, but also, theoretically, with the existing natural language processing methods, and it is not only the same function which can be made in three aspects of natural language understanding, expert knowledge expression, and software model recognition.</span></p><p id="h.yt99xw441b2z" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Natural Language Processing, Natural Language Understanding, NLP & AI, Saussure, Tarski and Carnap.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.wqkzlca2ggo1" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; 
padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>The Idea System for Semantic & NLP</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Jiawen QIU</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Yezhen ZHAO</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Xiaohui ZOU</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2, 3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; 
font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">China Mobile Communications Group Guangdong Co., Ltd. Zhuhai Branch, China, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Searle Research Center, No. 100, Renshan Road, Zhuhai, Guangdong 519000, China, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Peking University Alumni Association, Beijing 100871, China</span></p><p id="h.z87t037matv3" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The purpose of this paper is to put forward the concept of idea system and try to design its knowledge representation principle on the basis of academic discussion on the formation 
principle of the conceptual relationship of human knowledge. This method is an idea-based knowledge modeling method. The steps are: first, clearly define the concept as the mapping from sign to meaning; further, clarify that both sign and meaning are just a subclass of idea; finally, determine the separation of the sign relation network and the meaning relation network in the conceptual idea relation network, and modeling and computational processing are unified with the idea relationship network. It is characterized by: the knowledge representation method and principle of the idea relation network. The result is that the idea system, which is a new knowledge modeling tool system, systematically introduces its typical embodiments in combination with word, formula, figure, table, and compares and analyzes the research results of related knowledge graphs. Its significance lies in that: it is significantly different from the traditional method, that is, the knowledge representation model that uses symbols instead of meanings to model. This paper establishes a new method and new principle of knowledge construction model that is assisted by symbols and directly modeled with ideas, which is a new method for cognitive computing. 
It provides a new way to express knowledge based on meaning rather than symbols.</span></p><p id="h.oykn5wkepc6p" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Semantic & NLP, Idea System, Human-Machine Collaboration, Knowledge Graph, Knowledge Modeling, Artificial.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.xbutikexxyoy" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Ambiguity Detection in Requirements Classification Task using Fine-tuned Transformation 
Technique</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Sadeen Alharbi, Department of Software Engineering, King Saud University, Saudi Arabia</span></p><p id="h.at4icqjg9lja" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The software requirement specification (SRS) document is essential in software development. This document influences all subsequent steps in software development. 
Nevertheless, requirements problems, such as insufficient or ambiguous specifications, can cause misunderstandings during the requirement analysis stage. This influences testing activities and increases the project’s duration and cost overrun risk. This paper represents an intuitive approach to detecting ambiguity in software requirements. The classifier should learn ambiguous features and characteristics extracted from the text on a training set and try to detect similar characteristics from a testing set. To achieve this, this study experimented with two main approaches. The first approach is feature extraction, which uses the hidden states as features and trains a support vector machine (SVM) classifier to assess software requirement ambiguity without modifying the pre-trained model. Unfortunately, this approach only identified 68% of the requirement ambiguity. The second approach is training an end-to-end model that updates the parameters of the pre-trained model. This approach enhanced the baseline results by 13%.</span></p><p id="h.4439wgizt69i" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Requirements 
classification, NLP, Ambiguity.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.loxgdxubx45b" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Heterogenous Graph Network for Explainable Medical Event Identification</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Fengyi Li and Hang Wang, Ant Group, Shanghai, China</span></p><p id="h.8kib0n9mb1at" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: 
normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The explainability is critical for claim-decision making in medical insurance scenario, which is a challenging task due to various disease. To remedy this dilemma, this paper sets forth a novel architecture of claim-decision making via exploring the historical medical clues and identifying explainable medical events. Specifically, we introduce a heterogeneous graph construction to nourish the information of medical events and conduct text classification. Then, a knowledge fusion mechanism consumes the dynamic relation between events to eliminate the error-prone impact of graph neural networks, and a new witness loss facilitates the consistency between the distributions over the values of graph edges. Our proposed method is evaluated on actual medical insurance claim cases. 
Improvements of 2.6% F1-Score for claim-decision making and 16.6% Jaccard Similarity Coefficient compared to other counterpart models indicate the superiority of our work.</span></p><p id="h.ux6t96arsh0v" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Hierarchical Heterogeneous Graph, Explainability, Text Classification.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.68owozs5tbvb" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Impact of Blind and Automatic Image Classification in 
Improving Text Detection Performance of the Craft Algorithm</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Clarisa V. Albarillo</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Proceso L. 
Fernandez Jr</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">College of Computer Science, Don Mariano Marcos Memorial State University, La Union, Philippines, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Information Systems & Computer Science, Ateneo de Manila University, Quezon City, Philippines</span></p><p id="h.xjvmklsk4sw" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Text 
detection in natural scenes has been a significant and active research subject in computer vision and document analysis because of its wide range of application as evident by the emergence of the Robust Reading Competition. One of the algorithms which has good text detection performance in the said competition is the Character Region Awareness for Text Detection (CRAFT). Employing the ICDAR 2013 dataset, this study investigates the impact of automatic image classification and blind deconvolution as image pre-processing steps to further enhance the text detection performance of CRAFT. The proposed technique automatically classifies the scene images into two categories, blurry and non-blurry, by utilizing of a Laplacian operator with 100 as threshold. Prior to applying the CRAFT algorithm, images that are categorized as blurry are further pre-processed using blind deconvolution to reduce the blur. The results revealed that the proposed method significantly enhanced the detection performance of CRAFT, as demonstrated by its h-mean of 94.47% compared to the original 91.42% h-mean of CRAFT and even outperformed the top-ranked SenseTime, whose h-mean is 93.62%.</span></p><p id="h.43bz5mgxdq7t" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span 
style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Blind Deconvolution, Computer Vision, Image Classification, Information Retrieval, Image Processing.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.b4gg0b6bzo5e" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Dynamic aspect ratio</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Njal Borch 1 and Ingar Arntzen, NORCE Norwegian Research Institute</span></p><p id="h.nnstm357p7e2" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; 
line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Media is to a large extent consumed on devices that have non-standard aspect ratios, both physically or while rendering content. For example, social media platforms often favour 1:1 ratios, TVs 16:9, iPad tablets 4:3 or 3:4, most Androids 16:9 or 9:16, PCs 16:9 or 16:10 and web pages tend to use responsive design and can therefore have almost any aspect ratio. In order to ensure good experiences, it is often therefore necessary to create multiple versions of content, where the content is cropped to a more suitable format. Creating multiple encoded versions of the content is a static solution though, and there are good reasons for solving this dynamically on the client side. In this paper we make the case for a client side dynamic aspect ratio solution, present work on implementation and experimentation, and finally provide some insights into how such a system could be implemented and provided in real world systems. 
Our solution was tested on a few different pieces of content from NRK, both drama series and typical TV debates.</span></p><p id="h.rchg8m267u1x" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Dynamic aspect ratio, Focus track, Multi-device, client side, AI analysis.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.y4h43n7u31e6" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>PRAGAN: Progressive Recurrent Attention Gan with Pre-Trained Vit Discriminator for Single Image 
Deraining</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Bingcai Wei, Liye Zhang and Di Wang</span></p><p id="h.n8okeu6gushq" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Images collected under bad weather conditions are not conducive to the development of visual tasks. To solve this problem, it is a trend to use a variety of neural networks. 
The ingenious integration of network structures can make full use of the powerful representation and fitting ability of deep learning to complete low-level visual tasks. In this study, we propose a generative adversarial network(GAN) containing multiple attention mechanisms for the image deraining task. Firstly, to our best knowledge, we propose a pre-trained vision transformer(ViT) which is used for the discriminator in our GAN for single image deraining. Secondly, we propose a neural network training method, which can use a small amount of data for training while maintaining promising results and reliable vision quality. A large number of experiments have proved the correctness and effectiveness of our methods.</span></p><p id="h.ufd65q8l0gre" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Deep Learning, Image deraining, Neural Network, Vision Transformer.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; 
padding-right: 0; padding-top: 0;"><br></p><p id="h.9vsbrxz7elqq" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Deep Learning for the Classification of the Injunction in French Oral Utterances</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Asma BOUGRINE, Philippe RAVIER, Abdenour HACINE-GHARBI and Hanane OUACHOUR, Prisme Laboratory, University of Orleans, 12 rue de Blois, 45067 Orléans, France</span></p><p id="h.o3mdrfo3l6ga" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p 
dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The classification of the injunction in French oral speech is a difficult task. The logarithmic energy can be a good indicator and our goal is to validate the predominance of this prosodic feature using SVM and K-NN. When applied to our first dataset, this feature indeed showed the best classification rates (CR) of 82% for SVM and 71.42% for K-NN. However, the energy was not the relevant feature when applied to our second, heterogeneous and wild, dataset. In order to improve the classification rates, we applied LSTM and CNN networks. With LSTM, a CR=96.15% was found using 6 prosodic features with the first dataset against 64% with the second dataset. 
The CNN, a network capable of automatically extracting the most relevant features, gave a better result on the second dataset with CR=73% largely exceeding SVM, K-NN and LSTM.</span></p><p id="h.fu79seb40mty" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Injunction classification, prosodic features, CNN, LSTM, wild oral dataset.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.3baya123u56" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Fruit Type Classification using Deep Learning 
and Feature Fusion</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Harmandeep Singh Gill</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Baljit Singh Khehra</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Principal, Mata Gujri Khalsa College(Jalandhar) Punjab-144801, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Principal, BAM Khalsa College, 
Garhshankar (Hoshiarpur) Punjab</span></p><p id="h.z6xjvahk4mub" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Machine and deep learning applications play a dominant role in the current scenario in the agriculture sector. To date, the classification of fruits using image features has attained the researcher’s attraction very much from the last few years. Fruit recognition and classification is an ill-posed problem due to the heterogeneous nature of fruits. In the proposed work, Convolution neural network (CNN), Recurrent Neural Network (RNN), and Long-short Term Memory (LSTM) deep learning methods are used to extract the optimal image features, and to select features after extraction, and finally, use extracted image features to classify the fruits. 
To evaluate the performance of the proposed approach, the Support vector machine (SVM) unsupervised learning method, Artificial neuro-fuzzy inference system (ANFIS), and Feed-forward neural network (FFNN) classification results are compared, and observed that the proposed fruit classification approach results are quite efficient and promising.</span></p><p id="h.g22biky03wex" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Image classification, Feature Extraction, Deep Learning, Feature Fusion, CNN, RNN, LSTM.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.938orp5ngbn" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; 
white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Head Detection in Top-View Fisheye Images using Radius-Aware Loss Function</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Xuan Wei and Xiaobo Lu, School of Automation, Southeast University, Nanjing 210096, P.R China</span></p><p id="h.sb1y6cr2lcwu" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; 
vertical-align: baseline;">People detection is commonly used in computer vision systems, particularly for video surveillance and passenger flow statistics. Unlike standard cameras, fisheye cameras offer a large field of view and reduce occlusions when mounted overhead. However, due to the orientation variation of people in fisheye images, head detection models suffer from severe distortion when applied to fisheye images captured by top-view fisheye cameras. In this work, we develop an end-to-end head detection method against complex situations in top-view fisheye images. The radius-aware loss function is designed, and the channel attention module is added to the model to make our detection method adapt to the fisheye distortion. For evaluation, we collected and annotated new fisheye-image datasets.</span></p><p id="h.oy27407wjtyn" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Head Detection, Fisheye Images, Neural Network.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; 
margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.tg05er9hlv9d" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Research on the Image Segmentation by Watershed Transforms</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Sidi Ahmed Taouli, Department of Biomedical Genius, Faculty of technology, University Aboubekr-Belkaid, Tlemcen, Algeria.</span></p><p id="h.n59ziisvzgns" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: 
baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The processing of medical images play an important role in diagnosis and detection of different pathologies. The mathematical morphology is very suitable for the pretreatment and segmentation of medical images, which present rich information content. In this work we presented a segmentation paradigm by Watershed preceded by a filtering to eliminate insignificant minima, a marking to remove unmarked minima, and finally we implemented a hierarchical segmentation using the mosaic image of the original image. In principle, watershed segmentation depends on ridges to perform a proper segmentation, a property that is often fulfilled in contour detection where the boundaries of the objects are expressed as ridges. Watershed is normally implemented by region growing, based on a set of markers to avoid over segmentation. 
The diversity of segmentation offers us several ways to segment the image, always we must look for the right method to get good results.</span></p><p id="h.2klba0i5v6ul" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Image, Filtering, Mathematical Morphology, Watershed, Segmentation.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.engkw5g9xu41" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>A Soft System Methodology Approach to Stakeholder Engagement in Water Sensitive 
Urban</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Lina Ntomene Lukusa and Ulrike Rivett</span></p><p id="h.kwl392bgkiv4" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Poor water management can increase the extreme pressure already faced by water scarcity. Water quality and quantity will continue to degrade unless water management is addressed holistically. 
A holistic approach to water management named Water Sensitive Urban Design (WSUD) has thus been created to facilitate the effective management of water. Traditionally, water management has employed a linear design approach, while WSUD requires a systematic, cyclical approach. In simple terms, WSUD assumes that everything is connected. Hence, it is critical for different stakeholders involved in WSUD to engage and reach a consensus on a solution. However, many stakeholders in WSUD have conflicting interests. Using the soft system methodology (SSM), developed by Peter Checkland, as a problem-solving method, decision-makers can understand this problematic situation from different world views. The SSM addresses ill and complex challenging situations involving human activities in a complex structured scenario. This paper demonstrates how SSM can be applied to understand the complexity of stakeholder engagement in WSUD. The paper concludes that SSM is an adequate solution to understand a complex problem better and propose efficient solutions.</span></p><p id="h.wfs369we6au9" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: 
baseline;">Co-design, ICT Platform, Soft Systems Methodology, Water Sensitive Urban Design.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.ny0ap9m12nui" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>A Soft System Methodology Approach to Stakeholder Engagement in Water Sensitive Urban</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Lina Ntomene Lukusa and Ulrike Rivett</span></p><p id="h.nfzfzagsw774" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 
0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Poor water management can increase the extreme pressure already faced by water scarcity. Water quality and quantity will continue to degrade unless water management is addressed holistically. A holistic approach to water management named Water Sensitive Urban Design (WSUD) has thus been created to facilitate the effective management of water. Traditionally, water management has employed a linear design approach, while WSUD requires a systematic, cyclical approach. In simple terms, WSUD assumes that everything is connected. Hence, it is critical for different stakeholders involved in WSUD to engage and reach a consensus on a solution. However, many stakeholders in WSUD have conflicting interests. Using the soft system methodology (SSM), developed by Peter Checkland, as a problem-solving method, decision-makers can understand this problematic situation from different world views. The SSM addresses ill and complex challenging situations involving human activities in a complex structured scenario. This paper demonstrates how SSM can be applied to understand the complexity of stakeholder engagement in WSUD. 
The paper concludes that SSM is an adequate solution to understand a complex problem better and propose efficient solutions.</span></p><p id="h.k38ixvpm57c" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Co-design, ICT Platform, Soft Systems Methodology, Water Sensitive Urban Design.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.49c3iablklpe" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Indigenous Connection to Virtual Environments using Tribal Language Object Recognition as a 
Fuzzy Inference System</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Kevin Shedlock and Thomas Chandler, School of Computer Science and Engineering, Victoria University of Wellington, New Zealand</span></p><p id="h.lf4a23b8i9xd" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">For indigenous communities to engage with technology like virtual reality, being able to feel traditionally located inside the application is 
critical. Interactions with familiar language objects and methods in the real world need to correspond with experiences in the virtual world. Any deviation from tradition may result in a disconnection from the application. We investigate a method for embedding indigenous language during the processing of the virtual reality artefact. We argue that a virtual reality environment with strong tribal language and symbolic connections is important to alleviate any disconnect by tribal users of virtual reality. We ask; does the use of indigenous language objects improve the virtual reality experience? To explore this question, we quizzed indigenous Māori participants of Aotearoa, New Zealand. Then, implemented an indigenously organized process for evaluating a fuzzy inference system and report the results to better-understand the importance of language during the construction of virtual reality.</span></p><p id="h.l6v52mrzx10a" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Indigenous Research, IT Artefact, Virtual Reality, Indigenous Language, Soft Computing.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" 
style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.ajb1gm4mu67y" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Explainable AI and Interpretable Model for Insurance Premium Prediction</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Umar Isa A and Anil Fernando, Department of Computer and Information Science, University of Strathclyde, UK</span></p><p id="h.y1g0wz2cw50" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: 
#000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Traditional machine learning metrics such as precision and recall, accuracy, MSE, RMSE, etc. are not enough for a practitioner to be confident about the performance and dependability of their models. When unrelated features are included in the prediction process, machine learning models with greater than 99% accuracy score may fail to recognize particular classes from the dataset. We need to explain the model to domain specialists who require a human-understandable explanation in addition to ML professionals to establish trust in the prediction of our model. This is achieved by creating a model-independent, locally accurate explanation set that makes the primary model's conclusions understandable to anyone, including those who are not experts in the insurance industry. For effective human interaction with machine learning systems, this model interpretability is crucial. In addition to completing and supporting set validations in model selection, it is crucial to explain individual predictions in order to gauge trust. In this study, we proposed the use of LIME and SHAP approaches to understand and explain a model developed using random forest regression to predict insurance premiums. The SHAP algorithm's drawback, as seen in our experiments, is its lengthy computing time—in order to produce the findings, it must compute every possible combination. 
In addition, the experiments conducted were intended to focus on the model's interpretability and explainability using LIME and SHAP, not the insurance premium charge prediction. Two experiments were conducted. Experiment one was conducted to interpret the random forest regression model using LIME techniques. In experiment two, we used the SHAP technique to interpret the model.</span></p><p id="h.9qdymnke2com" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">LIME, SHAP, Explainable AI, Random Forest, Machine Learning, Insurance premium.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.ibkb588njmma" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; 
text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Evaluation of Machines Learning Algorithms in Detection and Mitigation of DNS-Based Phishing Attacks</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Kambey L. Kisambu</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Eng. 
Gilberto Makero</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Msc, Cyber Security, Department of Computer Science, University of Dodoma, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Network Engineer, e-Government Authority of Tanzania</span></p><p id="h.refi9vritjd7" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">DNS-based phishing attacks are among the major threats to Internet users that are 
difficult to defend against because they do not appear to be malicious in nature. Users have been primary targets for these attacks that aim to steal sensitive information. The DNS protocol is one of the approaches that adversaries use to transfer stolen data outside the institution's network using various forms of DNS tunneling attacks. This study deals with evaluation of ML algorithms in detection of DNS-based phishing attacks for securing networks. It deeply evaluates the efficacy of the algorithms when integrated with other solutions. The main classifiers used such as SVM, KNN, Logistic Regression and Naïve Bayes were evaluated using performance metrics namely accuracy, precision, recall and f-score. Based on the findings, the study proposed improvement for securing systems and networks against DNS-based phishing attacks using the best performing ML algorithm to keep pace with the evolution of attacks.</span></p><p id="h.5try8l673uew" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Malware, DNS-based, Phishing attacks, Machine learning, algorithms, DNS filters, DNS Poisoning, Detection, Mitigation techniques.</span></p><p 
dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.e3uue7i5n1kn" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Radio Map Construction based on BERT for Fingerprint-based Indoor Positioning System</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Zhuang Wang, Liye Zhang, Qun Kong, Cong Liu, Aikui Tian, Computer Science and Technology, Shandong University of Technology, Zibo, China</span></p><p id="h.903btnka0slk" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; 
padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Due to the heavy workload of RSS collection and the complex indoor environment, the WLAN signal easily disappears. Therefore, it is time-consuming and laborious to build a WLAN fingerprint indoor positioning system based on Radio Map. To quickly deploy an indoor WLAN positioning system, the Bidirectional Encoder Representation from Transformers (BERT) model is used to populate the missing signal in the Radio Map to quickly build the Radio Map. Because the number of input data in the BERT model cannot exceed 512 words, we redefine the model structure based on the original BERT model and fill the missing signals in the radio map in parallel. In addition, the sum of each fragment loss function is defined as the total loss function. 
The experimental results show that using the improved BERT model to fill the missing signals in Radio Map has higher accuracy and shorter time.</span></p><p id="h.p5mdsrqanhts" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Indoor Positioning System, WLAN, Radio Map, BERT Model, Loss Function.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.twmt43w94ufw" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Decreasing Noise Effect on the MIMO OFDM Channel Estimation based on Information 
Theoretic Learning</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Mohammad Reza pourmir</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Reza Monsefi</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Ghosheh Abed Hodtani</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Computer Department, Engineering Faculty Ferdowsi University of Mashhad (FUM), 
</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Electrical engineering Department, Engineering Faculty Ferdowsi University of Mashhad (FUM)</span></p><p id="h.1uro5qcfqdr2" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">In this paper, we introduce a new method for multiple-input multiple-output (MIMO) OFDM channel estimation based on deep learning using the information theoretic learning criteria, as a development of previous works, leading to the low complexity in estimating non-linear and time varying channels. 
A deep network, applicable as auto-encoder, with complex weights is introduced and the information-theoretic optimization criteria are exploited to obtain optimal weights showing that the proposed method decreases effect of noise on model parameters.</span></p><p id="h.ngv7i98zm1p0" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Deep Learning, MIMO OFDM Channel Estimation, Information-theoretic Criteria.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.3fii8d5538ij" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; 
vertical-align: baseline;"><strong>Present and the Future of Artificial Intelligence in Software Testing</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Nishtha Jaiswal</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Yatheshtha</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Rishi Prakash</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Paras Nath Barwal</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: 
baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of E-Governance, CDAC, Noida, U.P., India, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of CSE, Banasthali Vidhyapith, Jaipur, Rajasthan, India, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of E-Governance, C-DAC, Noida, U.P., India, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">4</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of E-Governance, C-DAC, Noida, U.P., India</span></p><p id="h.3iqom078wdr7" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; 
margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Software Testing is an important phase in software development where the software is tested against certain parameters in order to assure its quality before reaching the end user. Testing itself involves a lot of stages. There are various testing software tools like Selenium, TestComplete, LambdaTest, etc. which have made this task easier by automating the test running and error reporting process. But, generating test cases, deciding which test case to run, repairing the tests, and acting on reported failure still remains repetitive and time-consuming tasks for testers and developers. Luckily, in a world that is rapidly getting automated through Artificial Intelligence (AI), testing has not remained untouched. The involvement of AI has automated many important activities in the testing cycle like modeling, test generation, and failure detection. This has brought decision-making power to the testing tools. With the help of regressive algorithms, visual testing and self-healing systems are now a reality. Their use is expected to grow in the near future, completely automating the testing process. 
The purpose of this paper is, therefore, to analyze the advantages AI has brought to software testing so far, the benefit of using an AI-enabled automated testing framework, and how this technology can evolve in the near future.</span></p><p id="h.5kolkp6izxct" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Software Testing, Selenium, Artificial Intelligence, Automated Testing, Self-Healing Systems.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.9p9couor4mso" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; 
vertical-align: baseline;"><strong>SmartCarbs: An Intelligent Mobile Application to Assist Diet Control using Artificial Intelligence and Computer Vision</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Olivia Lee</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Yu Sun</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Portola High School, 1001 Cadence, Irvine, CA 92618, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 
'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92</span></p><p id="h.fq4czejrk1kh" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">In today’s society, Type 2 diabetes is a prevalent disease that affects hundreds of millions of people worldwide [11]. However, many people are unaware that they are diabetic or pre-diabetic, so they do not have access to the information to make better-informed nutritional choices that will suit their personal needs. In this paper, we designed an application that uses image classification to provide an estimate of the nutritional content of the selected food [12]. We applied our application to identify and list the nutritional content of multiple different foods, then conducted a qualitative evaluation of the approach. 
The results show that this application will facilitate healthy eating and allow users to utilize the image classification predictions to make diabetes-friendly choices.</span></p><p id="h.ij4k06g4vruo" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Machine Learning, Nutrition, Flutter, Image Classification.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.pcyx6jq8lkfy" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>An Object Detection and its Educational Effect on Aspiring Film Directors 
using Computer Vision and Machine Learning</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Fengrong Han</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Ang Li</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Veritas Collegiate Academy VA, 5225 Backlick Road Springfield,VA 22151, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">California State 
University, Long Beach, 1250 Bellflower Blvd, Long Beach, CA 90840</span></p><p id="h.y5rt1d6yck3g" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The issue that is being addressed is how those who wish to be involved in film production in the future can be more educated regarding what goes into making a movie. An application was created to help these people keep track of movie props and characters and where they are placed throughout a movie. This is performed by using videos or images as input for the application, and the application uses an object detection model from MediaPipe to determine what types of objects are in a scene and where exactly they are located in the scene [4]. Then, bounding boxes are placed around the objects with the classification of the object next to each bounding box, and the result is returned. The results of the experiment indicate that the application is consistent at detecting objects when the object is fully shown on the screen. 
However, when the application used the cropped images as input as opposed to the full version of the image, the application was not consistently accurate, as it was not able to recognize objects that were partially cut out of the picture. More work will be needed to improve both the object detection capabilities of partially obstructed objects and the overall user interface of the application [5].</span></p><p id="h.lykt46fn249n" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Movies, Object Detection, MediaPipe, Application.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.ixq2cajntdx8" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: 
justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; vertical-align: baseline;"><strong>Fusion of Motion And Electromyographic Signals for Robust Hand Gesture Recognition Based on Heterogeneous Networks</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Yongxiang Zou</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Long Cheng</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Lijun Han</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Zhengwei Li</span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; 
font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">School of Artificial Intelligence, University of Chinese Academy of Sciences, Beijing 100049, China, </span><sup style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">State Key Laboratory of Management and Control for Complex Systems, Institute of Automation, Chinese Academy of Sciences, Beijing 100190, China.</span></p><p id="h.gy2pfntgwx60" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Hand gesture recognition has become a vital subject in the fields of human-computer 
interaction and rehabilitation assessment. This paper presented a multi-modal fusion system named MFHG, which used two heterogeneous networks to extract and fuse the deep features of the motion signals and the surface electromyography (sEMG) signals, respectively. To extract the features of the motion signals, a graph neural network named the cumulative graph attention (CGAT) model was first proposed to characterize the prior knowledge of motion coupling between finger joints. The CGAT model used the cumulative mechanism to fuse the early and late extracted features to improve motion-based hand gesture recognition. In addition, a convolutional neural network (CNN) model named TFHG was proposed to extract both the time-domain and frequency-domain information of the sEMG signals. To improve the performance of hand gesture recognition, the deep features from multiple modes were merged with the average layer, then the regularization items containing center loss and the mutual information loss were employed to enhance the robustness of this multi-modal system. Finally, a data set was built, which contained the multi-modal signals from seven subjects on different days, to verify the performance of the multi-modal system. 
The experimental results indicated that the MFHG can reach 99.96% and 92.46% accuracy on hand gesture recognition in the case of within-session and cross-day, respectively.</span></p><p id="h.9w7ys5p37i7f" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 12pt; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Leap Motion, sEMG, Multi-modal, Graph Neural Network, Feature Regularization.</span></p></div></div></div></div></div></div></div></div></div></section><section id="h.33e574b64c58fbdc_227" class="yaqOZd cJgDec nyKByd" style=""><div class="IFuOkc" style="background-size: cover; background-position: center center; background-image: url(https://lh5.googleusercontent.com/2ar2xfnwJjr9D7Yv6eaSAO4snEY0YnHEg_5l0Vy0_gUSlZl_uOCd-He1bgku8-INDCDUPZ-Nl2eFFCZUmoRrvYY=w16383);"></div><div class="mYVXT"><div class="LS81yb VICjCf j5pSsc db35Fc" tabindex="-1"><div class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd purZT-AhqUyc-II5mzb ZcASvf-AhqUyc-II5mzb pSzOP-AhqUyc-qWD73c Ktthjf-AhqUyc-qWD73c JNdkSc SQVYQc"><div class="JNdkSc-SmKAyb LkDMRd"><div class="" jscontroller="sGwD4d" jsaction="zXBUYb:zTPCnb;zQF9Uc:Qxe3nd;" jsname="F57UId"><div class="oKdM2c ZZyype Kzv0Me"><div 
id="h.33e574b64c58fbdc_224" class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd jXK9ad D2fZ2 zu5uec OjCsFc dmUFtb wHaque g5GTcb JYTMs"><div class="jXK9ad-SmKAyb"><div class="tyJCtd mGzaTb Depvyb baZpAe"><p dir="ltr" class="CDt4Ke zfr3Q" style="line-height: 1.2;"><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-variant: normal; vertical-align: baseline;"><strong>Contact Us </strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="line-height: 1.2;"><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-variant: normal; font-weight: normal; vertical-align: baseline;"> </span><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-variant: normal; font-weight: normal; text-decoration: underline; vertical-align: baseline;"><a class="XqQF9c rXJpyf" href="mailto:spprconf@yahoo.com" target="_blank">spprconf@yahoo.com</a></span><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 12pt; font-variant: normal; font-weight: normal; vertical-align: baseline;"> </span></p></div></div></div></div></div></div></div></div></div></section></div><div class="Xpil1b xgQ6eb"></div><footer jsname="yePe5c"><section id="h.33e574b64c58fbdc_48" class="yaqOZd cJgDec nyKByd" style=""><div class="IFuOkc" style="background-size: cover; background-position: center center; background-image: url(https://lh4.googleusercontent.com/7zqA0UkGJMBB-FzPkFFe-hcS97J7KTEsMZ5dO0KZKsKU1lwBs9PmH78xM8_6-QQbIRvx2whaiuSl_Zhoij3BEJ8=w16383);"></div><div class="mYVXT"><div class="LS81yb VICjCf j5pSsc db35Fc" tabindex="-1"><div class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd purZT-AhqUyc-II5mzb ZcASvf-AhqUyc-II5mzb pSzOP-AhqUyc-qWD73c Ktthjf-AhqUyc-qWD73c JNdkSc SQVYQc"><div class="JNdkSc-SmKAyb LkDMRd"><div class="" jscontroller="sGwD4d" jsaction="zXBUYb:zTPCnb;zQF9Uc:Qxe3nd;" jsname="F57UId"><div class="oKdM2c ZZyype Kzv0Me"><div id="h.33e574b64c58fbdc_45" 
class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd jXK9ad D2fZ2 zu5uec OjCsFc dmUFtb wHaque g5GTcb JYTMs"><div class="jXK9ad-SmKAyb"><div class="tyJCtd mGzaTb Depvyb baZpAe"><small id="h.x5asltwnzvjc" dir="ltr" class="CDt4Ke zfr3Q TMjjoe" style="display: block; text-align: center;"><span style="color: #ffffff; font-family: 'Times New Roman', 'Arial'; font-size: 14pt; vertical-align: baseline;"><strong>Copyright © SPPR 2022 </strong></span></small></div></div></div></div></div></div></div></div></div></section></footer><div jscontroller="j1RDQb" jsaction="rcuQ6b:rcuQ6b;MxH79b:JdcaS;FaOgy:XuHpsb;" class="dZA9kd ynRLnc" data-last-updated-at-time="1671254427553" data-is-preview="false"><div role="button" class="U26fgb JRtysb WzwrXb I12f0b K2mXPb zXBiaf ynRLnc" jscontroller="iSvg6e" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;;keydown:I481le;" jsshadow jsname="Bg3gkf" aria-label="Site actions" aria-disabled="false" tabindex="0" aria-haspopup="true" aria-expanded="false" data-menu-corner="bottom-start" data-anchor-corner="top-start"><div class="NWlf3e MbhUzd" jsname="ksKsZd"></div><span jsslot class="MhXXcc oJeWuf"><span class="Lw7GHd snByac"><svg width="24" height="24" viewBox="0 0 24 24" focusable="false" class=" NMm5M"><path d="M11 17h2v-6h-2v6zm1-15C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zM11 9h2V7h-2v2z"/></svg></span></span><div jsname="xl07Ob" style="display:none" aria-hidden="true"><div class="JPdR6b hVNH5c" jscontroller="uY3Nvd" jsaction="IpSVtb:TvD9Pc;fEN2Ze:xzS4ub;frq95c:LNeFm;cFpp9e:J9oOtd; click:H8nU8b; mouseup:H8nU8b; keydown:I481le; keypress:Kr2w4b; blur:O22p3e; focus:H8nU8b" role="menu" tabindex="0" style="position:fixed"><div class="XvhY1d" jsaction="mousedown:p8EH2c; touchstart:p8EH2c;"><div 
class="JAPqpe K0NPx"><span jsslot class="z80M1 FeRvI" jsaction="click:o6ZaF(preventDefault=true); mousedown:lAhnzb; mouseup:Osgxgf; mouseenter:SKyDAe; mouseleave:xq3APb;touchstart:jJiBRc; touchmove:kZeBdd; touchend:VfAz8" jsname="j7LFlb" aria-label="Google Sites" role="menuitem" tabindex="-1"><div class="aBBjbd MbhUzd" jsname="ksKsZd"></div><div class="uyYuVb oJeWuf" jsaction="JIbuQc:Toy3n;" jsname="V2zOu"><div class="jO7h3c">Google Sites</div></div></span><span jsslot class="z80M1 FeRvI" jsaction="click:o6ZaF(preventDefault=true); mousedown:lAhnzb; mouseup:Osgxgf; mouseenter:SKyDAe; mouseleave:xq3APb;touchstart:jJiBRc; touchmove:kZeBdd; touchend:VfAz8" jsname="j7LFlb" data-disabled-tooltip="Report abuse is not available in preview mode" aria-label="Report abuse" role="menuitem" tabindex="-1"><div class="aBBjbd MbhUzd" jsname="ksKsZd"></div><div class="uyYuVb oJeWuf" jscontroller="HYv29e" jsaction="JIbuQc:dQ6O0c;" jsname="lV5oke" data-abuse-proto="%.@.null,null,"https://sites.google.com/view/sppr-2022/accepted-papers"]" data-abuse-reporting-widget-proto="%.@.null,"https://sites.google.com/view/sppr-2022/accepted-papers"]"><div class="jO7h3c">Report abuse</div></div></span><span jsslot class="z80M1 FeRvI" jsaction="click:o6ZaF(preventDefault=true); mousedown:lAhnzb; mouseup:Osgxgf; mouseenter:SKyDAe; mouseleave:xq3APb;touchstart:jJiBRc; touchmove:kZeBdd; touchend:VfAz8" jsname="j7LFlb" aria-label="Page details" role="menuitem" tabindex="-1"><div class="aBBjbd MbhUzd" jsname="ksKsZd"></div><div class="uyYuVb oJeWuf" jsaction="JIbuQc:hriXLd;" jsname="Rg8K2c"><div class="jO7h3c">Page details</div></div></span></div></div></div></div></div></div><div jscontroller="j1RDQb" jsaction="focusin:gBxDVb(srlkmf); focusout:zvXhGb(srlkmf); click:ro2KTd(psdQ5e);JIbuQc:DSypkd(Bg3gkf);MxH79b:JdcaS;rcuQ6b:rcuQ6b;" class="LqzjUe ynRLnc" data-last-updated-at-time="1671254427553" data-is-preview="false"><div jsname="psdQ5e" class="Q0cSn"></div><div jsname="bN97Pc" class="hBW7Hb"><div 
role="button" class="U26fgb mUbCce fKz7Od kpPxtd QMuaBc M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="Bg3gkf" aria-label="Site actions" aria-disabled="false" tabindex="-1" aria-hidden="true"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg width="24" height="24" viewBox="0 0 24 24" focusable="false" class=" NMm5M"><path d="M11 17h2v-6h-2v6zm1-15C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zM11 9h2V7h-2v2z"/></svg></span></span></div><div jsname="srlkmf" class="hUphyc"><div class="YkaBSd"><div class="iBkmkf"><span>Page updated</span> <span jsname="CFIm1b" class="dji00c" jsaction="AHmuwe:eGiyHb; mouseover:eGiyHb;" tabindex="0" role="contentinfo"></span></div></div><div class="YkaBSd" jsaction="click:Toy3n;"><div role="button" class="U26fgb kpPxtd J7BuEb" jsshadow jsname="V2zOu" aria-disabled="false" tabindex="0">Google Sites</div></div><div class="YkaBSd" jscontroller="HYv29e" jsaction="click:dQ6O0c;" data-abuse-proto="%.@.null,null,"https://sites.google.com/view/sppr-2022/accepted-papers"]" data-abuse-reporting-widget-proto="%.@.null,"https://sites.google.com/view/sppr-2022/accepted-papers"]"><div role="button" class="U26fgb kpPxtd J7BuEb" jsshadow aria-label="Report abuse" aria-disabled="false" tabindex="0">Report abuse</div></div></div></div></div></div></div></div><script nonce="-R1CwJytIrdmm_Hp_93n9Q">DOCS_timing['cov']=new Date().getTime();</script><script src="https://www.gstatic.com/_/atari/_/js/k=atari.vw.en_US.fw_mAcuwUyE.O/am=MBiA/d=1/rs=AGEqA5lwNXFYaHUUDGYHiMqlOO36DqQAOw/m=view" id="base-js" nonce="-R1CwJytIrdmm_Hp_93n9Q"></script></div></div><div jscontroller="YV8yqd" 
jsaction="rcuQ6b:npT2md"></div></body></html>