
Accepted Papers

<!DOCTYPE html><html lang="en-US" itemscope itemtype="http://schema.org/WebPage"><head><meta charset="utf-8"><script nonce="8-mTV4AAsQYTPM4ekq5k_g">var DOCS_timing={}; DOCS_timing['sl']=new Date().getTime();</script><script nonce="8-mTV4AAsQYTPM4ekq5k_g">function _DumpException(e) {throw e;}</script><script data-id="_gd" nonce="8-mTV4AAsQYTPM4ekq5k_g">window.WIZ_global_data = {"K1cgmc":"%.@.[null,null,null,[null,1,[1732638810,747810000]]]]","nQyAE":{}};</script><script nonce="8-mTV4AAsQYTPM4ekq5k_g">_docs_flag_initialData={"atari-emtpr":false,"atari-eibrm":false,"docs-text-elei":false,"docs-text-usc":true,"atari-bae":false,"docs-text-emtps":true,"docs-text-etsrdpn":false,"docs-text-etsrds":false,"docs-text-endes":false,"docs-text-escpv":true,"docs-text-ecfs":false,"docs-text-ecis":true,"docs-text-eectfs":true,"docs-text-edctzs":true,"docs-text-eetxpc":false,"docs-text-eetxp":false,"docs-text-ertkmcp":true,"docs-text-ettctvs":false,"docs-text-ettts":true,"docs-text-escoubs":false,"docs-text-escivs":false,"docs-text-escitrbs":false,"docs-text-ecgvd":false,"docs-text-esbbcts":true,"docs-text-etccdts":false,"docs-text-etcchrs":false,"docs-text-etctrs":false,"docs-text-eltbbs":true,"docs-text-ecvdis":false,"docs-text-elaiabbs":false,"docs-text-eiosmc":false,"docs-text-ecslpo":false,"docs-text-etb":false,"docs-text-esbefr":false,"docs-text-ipi":false,"docs-etshc":false,"docs-text-tbcb":2.0E7,"docs-efsmsdl":false,"docs-text-etof":false,"docs-text-ehlb":false,"docs-text-epa":true,"docs-text-dwit":false,"docs-text-elawp":false,"docs-eec":false,"docs-ecot":"","docs-text-enbcr":false,"docs-sup":"","umss":false,"docs-eldi":false,"docs-dli":false,"docs-liap":"/logImpressions","ilcm":{"eui":"AHKXmL3rCJO5gy2OktnAkY8PqdzotcJfY6OG75qxJPAGWUMggphVec715325Z7pDeHI97Ibv69A3","je":1,"sstu":1732666658241865,"si":"CMr6tqWe-4kDFcs0bwYd-rkGYQ","gsc":null,"ei":[5703839,5704621,5706832,5706836,5707711,5737784,5737800,5738513,5738529,5740798,5740814,5743108,5743124,5747265,5748013,5748029,5752678,5752694,5753313,5753329,5754213,5754229,5755080,5755096,5758807,5758823,5762243,5762259,5764252,5764268,5765535,5765551,5766761,5766777,5773662,5773678,5774331,5774347,5774836,5774852,5776501,5776517,5784931,5784947,5784951,5784967,5791766,5791782,5796457,5796473,14101306,14101502,14101510,14101534,49372435,49372443,49375314,49375322,49472063,49472071,49622823,49622831,49623173,49623181,49643568,49643576,49644015,49644023,49769337,49769345,49822921,49822929,49823164,49823172,49833462,49833470,49842855,49842863,49924706,49924714,50221720,50221728,50266222,50266230,50273528,50273536,50297076,50297084,50297426,50297434,50498907,50498915,50529103,50529111,50561333,50561341,50586962,50586970,70971256,70971264,71035517,71035525,71038255,71038263,71079938,71079946,71085241,71085249,71185170,71185178,71197826,71197834,71238946,71238954,71289146,71289154,71387889,71387897,71429507,71429515,71478200,71478208,71478589,71478597,71502841,71502849,71528597,71528605,71530083,71530091,71544834,71544842,71545513,71545521,71546425,71546433,71560069,71560077,71561541,71561549,71573870,71573878,71642103,71642111,71652840,71652848,71658040,71658048,71659813,71659821,71689860,71689868,71699841,71699849,71720760,71721087,71721095,71733073,71733081,71798420,71798436,71798440,71798456,71849655,71849663,71882106,71882114,71897827,71897835,71960540,71960548,71961126,71961134,94327661,94327669,94333153,94333161,94353368,94353376,94390153,94390161,94413607,94413615,94420737,94420745,94434257,94434265,94435578,94435586,94444292,94444300,94484634,94484642,9
4489858,94489866,94502654,94502662,94526768,94526776,94545004,94545012,94597639,94597647,94630911,94661802,94661810,94707424,94707432,94784571,94784579,94875009,94875017,94904089,94904097,94929210,94929218,94942490,94942498,95065889,95065897,95086191,95086199,95087186,95087194,95087227,95087235,95112873,95112881,95118551,95118559,95135933,95135941,95234185,95234871,95234879,95251262,95251270,95254920,95254928,95270945,95270953,95314802,95314810,95317975,99237681,99237689,99247596,99247604,99310979,99310987,99338440,99338448,99368792,99368800,99401881,99401889,99402331,99402339,99437441,99437449,99460069,100130662,100130678,101406734,101406742,101442805,101442813,101456452,101456460,101488823,101488831,101489187,101489195,101507186,101507194,101519280,101519288,101606928,101606936,101617516,101617524,101631040,101631048,101705087,101708583,101708591,101771970,101771978,101776366,101776374,101783430,101783446,101875084,101875092,102047762,102047770],"crc":0,"cvi":[]},"docs-ccdil":false,"docs-eil":true,"info_params":{},"buildLabel":"editors.sites-viewer-frontend_20241112.02_p1","docs-show_debug_info":false,"atari-jefp":"/_/view/jserror","docs-jern":"view","atari-rhpp":"/_/view","docs-ecuach":false,"docs-cclt":2033,"docs-ecci":true,"docs-esi":false,"docs-efypr":true,"docs-eyprp":true}; _docs_flag_cek= null ; if (window['DOCS_timing']) {DOCS_timing['ifdld']=new Date().getTime();}</script><meta name="viewport" content="width=device-width, initial-scale=1"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="referrer" content="origin"><link rel="icon" href="https://lh4.googleusercontent.com/WGjdlXTcMa-SyWyRPyTTb8uv3J6sK37mp-Rm32-iLgBj3n081z2qnwK4gArVMALKtxuO9lArO3YIt3PFkSF_evkRnoJU9cbtjqxM30e3mA4lsCE1"><meta property="og:title" content="Accepted Papers"><meta property="og:type" content="website"><meta property="og:url" content="https://sites.google.com/view/aimlnet-2022/accepted-papers"><meta property="og:description" content=" Accepted Papers"><meta itemprop="name" content="Accepted Papers"><meta itemprop="description" content=" Accepted Papers"><meta itemprop="url" content="https://sites.google.com/view/aimlnet-2022/accepted-papers"><meta itemprop="thumbnailUrl" content="https://lh6.googleusercontent.com/kfETP3k0EbtQQVwXD0apgaA7Kejmii_mIMheofjc3WjkuudjB3aOHpmVE2Q6Ia2v3nNInxZISBrv7662kvw0gsY=w16383"><meta itemprop="image" content="https://lh6.googleusercontent.com/kfETP3k0EbtQQVwXD0apgaA7Kejmii_mIMheofjc3WjkuudjB3aOHpmVE2Q6Ia2v3nNInxZISBrv7662kvw0gsY=w16383"><meta itemprop="imageUrl" content="https://lh6.googleusercontent.com/kfETP3k0EbtQQVwXD0apgaA7Kejmii_mIMheofjc3WjkuudjB3aOHpmVE2Q6Ia2v3nNInxZISBrv7662kvw0gsY=w16383"><meta property="og:image" content="https://lh6.googleusercontent.com/kfETP3k0EbtQQVwXD0apgaA7Kejmii_mIMheofjc3WjkuudjB3aOHpmVE2Q6Ia2v3nNInxZISBrv7662kvw0gsY=w16383"><link href="https://fonts.googleapis.com/css?family=Lato%3A300%2C300italic%2C400%2C400italic%2C700%2C700italic&display=swap" rel="stylesheet" nonce="4b2AbZSLtLCYjvGkxBLgpg"><link href="https://fonts.googleapis.com/css?family=Google+Sans:400,500|Roboto:300,400,500,700|Source+Code+Pro:400,700&display=swap" rel="stylesheet" nonce="4b2AbZSLtLCYjvGkxBLgpg"><style nonce="4b2AbZSLtLCYjvGkxBLgpg">@media only screen and (max-width: 479px){.jgG6ef{font-size: 17.0pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.jgG6ef{font-size: 17.0pt;}}@media only screen and (min-width: 768px) and (max-width: 1279px){.jgG6ef{font-size: 18.0pt;}}@media only screen and (min-width: 1280px){.jgG6ef{font-size: 
18.0pt;}}</style><link rel="stylesheet" href="https://www.gstatic.com/_/atari/_/ss/k=atari.vw.7R57rYYVGiA.L.X.O/am=MBiA/d=1/rs=AGEqA5nH97dOwqY0iGblvGlWiaR59owncA" data-id="_cl" nonce="4b2AbZSLtLCYjvGkxBLgpg"><script nonce="8-mTV4AAsQYTPM4ekq5k_g"></script><title>Accepted Papers</title><style jsname="ptDGoc" nonce="4b2AbZSLtLCYjvGkxBLgpg">.M63kCb{background-color: rgba(255,255,255,1);}.OUGEr{color: rgba(33,33,33,1);}.duRjpb .OUGEr{color: rgba(34,110,147,1);}.JYVBee .OUGEr{color: rgba(34,110,147,1);}.OmQG5e .OUGEr{color: rgba(33,33,33,1);}.iwQgFb{background-color: rgba(0,0,0,0.150000006);}.ySLm4c{font-family: Lato, sans-serif;}.CbiMKe{background-color: rgba(30,108,147,1);}.qeLZfd .zfr3Q{color: rgba(33,33,33,1);}.qeLZfd .qnVSj{color: rgba(33,33,33,1);}.qeLZfd .Glwbz{color: rgba(33,33,33,1);}.qeLZfd .duRjpb{color: rgba(34,110,147,1);}.qeLZfd .qLrapd{color: rgba(34,110,147,1);}.qeLZfd .JYVBee{color: rgba(34,110,147,1);}.qeLZfd .aHM7ed{color: rgba(34,110,147,1);}.qeLZfd .OmQG5e{color: rgba(33,33,33,1);}.qeLZfd .NHD4Gf{color: rgba(33,33,33,1);}.qeLZfd .aw5Odc{color: rgba(0,101,128,1);}.qeLZfd .dhtgD:hover{color: rgba(0,0,0,1);}.qeLZfd .dhtgD:visited{color: rgba(0,101,128,1);}.qeLZfd .iwQgFb{background-color: rgba(0,0,0,0.150000006);}.qeLZfd .OUGEr{color: rgba(33,33,33,1);}.qeLZfd .duRjpb .OUGEr{color: rgba(34,110,147,1);}.qeLZfd .JYVBee .OUGEr{color: rgba(34,110,147,1);}.qeLZfd .OmQG5e .OUGEr{color: rgba(33,33,33,1);}.qeLZfd:before{background-color: rgba(242,242,242,1); display: block;}.lQAHbd .zfr3Q{color: rgba(255,255,255,1);}.lQAHbd .qnVSj{color: rgba(255,255,255,1);}.lQAHbd .Glwbz{color: rgba(255,255,255,1);}.lQAHbd .duRjpb{color: rgba(255,255,255,1);}.lQAHbd .qLrapd{color: rgba(255,255,255,1);}.lQAHbd .JYVBee{color: rgba(255,255,255,1);}.lQAHbd .aHM7ed{color: rgba(255,255,255,1);}.lQAHbd .OmQG5e{color: rgba(255,255,255,1);}.lQAHbd .NHD4Gf{color: rgba(255,255,255,1);}.lQAHbd .aw5Odc{color: rgba(255,255,255,1);}.lQAHbd .dhtgD:hover{color: rgba(255,255,255,1);}.lQAHbd .dhtgD:visited{color: rgba(255,255,255,1);}.lQAHbd .iwQgFb{background-color: rgba(255,255,255,0.150000006);}.lQAHbd .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .duRjpb .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .JYVBee .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .OmQG5e .OUGEr{color: rgba(255,255,255,1);}.lQAHbd .CbiMKe{background-color: rgba(255,255,255,1);}.lQAHbd:before{background-color: rgba(30,108,147,1); display: block;}.cJgDec .zfr3Q{color: rgba(255,255,255,1);}.cJgDec .zfr3Q .OUGEr{color: rgba(255,255,255,1);}.cJgDec .qnVSj{color: rgba(255,255,255,1);}.cJgDec .Glwbz{color: rgba(255,255,255,1);}.cJgDec .qLrapd{color: rgba(255,255,255,1);}.cJgDec .aHM7ed{color: rgba(255,255,255,1);}.cJgDec .NHD4Gf{color: rgba(255,255,255,1);}.cJgDec .IFuOkc:before{background-color: rgba(33,33,33,1); opacity: 0; display: block;}.O13XJf{height: 340px; padding-bottom: 60px; padding-top: 60px;}.O13XJf .IFuOkc{background-color: rgba(34,110,147,1); background-image: url(https://ssl.gstatic.com/atari/images/simple-header-blended-small.png);}.O13XJf .IFuOkc:before{background-color: rgba(33,33,33,1); opacity: 0.4; display: block;}.O13XJf .zfr3Q{color: rgba(255,255,255,1);}.O13XJf .qnVSj{color: rgba(255,255,255,1);}.O13XJf .Glwbz{color: rgba(255,255,255,1);}.O13XJf .duRjpb{color: rgba(255,255,255,1);}.O13XJf .qLrapd{color: rgba(255,255,255,1);}.O13XJf .JYVBee{color: rgba(255,255,255,1);}.O13XJf .aHM7ed{color: rgba(255,255,255,1);}.O13XJf .OmQG5e{color: rgba(255,255,255,1);}.O13XJf .NHD4Gf{color: rgba(255,255,255,1);}.tpmmCb .zfr3Q{color: 
rgba(33,33,33,1);}.tpmmCb .zfr3Q .OUGEr{color: rgba(33,33,33,1);}.tpmmCb .qnVSj{color: rgba(33,33,33,1);}.tpmmCb .Glwbz{color: rgba(33,33,33,1);}.tpmmCb .qLrapd{color: rgba(33,33,33,1);}.tpmmCb .aHM7ed{color: rgba(33,33,33,1);}.tpmmCb .NHD4Gf{color: rgba(33,33,33,1);}.tpmmCb .IFuOkc:before{background-color: rgba(255,255,255,1); display: block;}.tpmmCb .Wew9ke{fill: rgba(33,33,33,1);}.aw5Odc{color: rgba(0,101,128,1);}.dhtgD:hover{color: rgba(0,122,147,1);}.dhtgD:active{color: rgba(0,122,147,1);}.dhtgD:visited{color: rgba(0,101,128,1);}.Zjiec{color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 19pt; font-weight: 300; letter-spacing: 1px; line-height: 1.3; padding-bottom: 62.5px; padding-left: 48px; padding-right: 36px; padding-top: 11.5px;}.XMyrgf{margin-top: 0px; margin-left: 48px; margin-bottom: 24px; margin-right: 24px;}.TlfmSc{color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 15pt; font-weight: 300; line-height: 1.333;}.Mz8gvb{color: rgba(255,255,255,1);}.zDUgLc{background-color: rgba(33,33,33,1);}.QTKDff.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.YTv4We{color: rgba(178,178,178,1);}.YTv4We:hover:before{background-color: rgba(255,255,255,0.1199999973); display: block;}.YTv4We.chg4Jd:focus:before{border-color: rgba(255,255,255,0.3600000143); display: block;}.eWDljc{background-color: rgba(33,33,33,1);}.eWDljc .hDrhEe{padding-left: 8px;}.ZXW7w{color: rgba(255,255,255,1); opacity: 0.26;}.PsKE7e{color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 12pt; font-weight: 300;}.lhZOrc{color: rgba(73,170,212,1);}.hDrhEe:hover{color: rgba(73,170,212,1);}.M9vuGd{color: rgba(73,170,212,1); font-weight: 400;}.jgXgSe:hover{color: rgba(73,170,212,1);}.j10yRb:hover{color: rgba(0,188,212,1);}.j10yRb.chg4Jd:focus:before{border-color: rgba(255,255,255,0.3600000143); display: block;}.tCHXDc{color: rgba(255,255,255,1);}.iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.wgxiMe{background-color: rgba(33,33,33,1);}.fOU46b .TlfmSc{color: rgba(255,255,255,1);}.fOU46b .KJll8d{background-color: rgba(255,255,255,1);}.fOU46b .Mz8gvb{color: rgba(255,255,255,1);}.fOU46b .Mz8gvb.chg4Jd:focus:before{border-color: rgba(255,255,255,1); display: block;}.fOU46b .qV4dIc{color: rgba(255,255,255,0.8700000048);}.fOU46b .jgXgSe:hover{color: rgba(255,255,255,1);}.fOU46b .M9vuGd{color: rgba(255,255,255,1);}.fOU46b .tCHXDc{color: rgba(255,255,255,0.8700000048);}.fOU46b .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.fOU46b .G8QRnc .Mz8gvb{color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .Mz8gvb.chg4Jd:focus:before{border-color: rgba(0,0,0,0.8000000119); display: block;}.fOU46b .G8QRnc .ZXW7w{color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .TlfmSc{color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .KJll8d{background-color: rgba(0,0,0,0.8000000119);}.fOU46b .G8QRnc .qV4dIc{color: rgba(0,0,0,0.6399999857);}.fOU46b .G8QRnc .jgXgSe:hover{color: rgba(0,0,0,0.8199999928);}.fOU46b .G8QRnc .M9vuGd{color: rgba(0,0,0,0.8199999928);}.fOU46b .G8QRnc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.fOU46b .G8QRnc .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .usN8rf .Mz8gvb{color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .Mz8gvb.chg4Jd:focus:before{border-color: rgba(0,0,0,0.8000000119); display: block;}.fOU46b .usN8rf .ZXW7w{color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .TlfmSc{color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .KJll8d{background-color: rgba(0,0,0,0.8000000119);}.fOU46b .usN8rf .qV4dIc{color: 
rgba(0,0,0,0.6399999857);}.fOU46b .usN8rf .jgXgSe:hover{color: rgba(0,0,0,0.8199999928);}.fOU46b .usN8rf .M9vuGd{color: rgba(0,0,0,0.8199999928);}.fOU46b .usN8rf .tCHXDc{color: rgba(0,0,0,0.6399999857);}.fOU46b .usN8rf .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .aCIEDd .qV4dIc{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .TlfmSc{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .KJll8d{background-color: rgba(33,33,33,1);}.fOU46b .aCIEDd .ZXW7w{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .jgXgSe:hover{color: rgba(33,33,33,1); opacity: 0.82;}.fOU46b .aCIEDd .Mz8gvb{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .tCHXDc{color: rgba(33,33,33,1);}.fOU46b .aCIEDd .iWs3gf.chg4Jd:focus{background-color: rgba(33,33,33,0.1199999973);}.fOU46b .a3ETed .qV4dIc{color: rgba(255,255,255,1);}.fOU46b .a3ETed .TlfmSc{color: rgba(255,255,255,1);}.fOU46b .a3ETed .KJll8d{background-color: rgba(255,255,255,1);}.fOU46b .a3ETed .ZXW7w{color: rgba(255,255,255,1);}.fOU46b .a3ETed .jgXgSe:hover{color: rgba(255,255,255,1); opacity: 0.82;}.fOU46b .a3ETed .Mz8gvb{color: rgba(255,255,255,1);}.fOU46b .a3ETed .tCHXDc{color: rgba(255,255,255,1);}.fOU46b .a3ETed .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}@media only screen and (min-width: 1280px){.XeSM4.b2Iqye.fOU46b .LBrwzc .tCHXDc{color: rgba(255,255,255,0.8700000048);}}.XeSM4.b2Iqye.fOU46b .LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}@media only screen and (min-width: 1280px){.KuNac.b2Iqye.fOU46b .tCHXDc{color: rgba(0,0,0,0.6399999857);}}.KuNac.b2Iqye.fOU46b .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .zDUgLc{opacity: 0;}.LBrwzc .ZXW7w{color: rgba(0,0,0,1);}.LBrwzc .KJll8d{background-color: rgba(0,0,0,1);}.GBy4H .ZXW7w{color: rgba(255,255,255,1);}.GBy4H .KJll8d{background-color: rgba(255,255,255,1);}.eBSUbc{background-color: rgba(33,33,33,1); color: rgba(0,188,212,0.6999999881);}.BFDQOb:hover{color: rgba(73,170,212,1);}.ImnMyf{background-color: rgba(255,255,255,1); color: rgba(33,33,33,1);}.Vs12Bd{background-color: rgba(242,242,242,1); color: rgba(33,33,33,1);}.S5d9Rd{background-color: rgba(30,108,147,1); color: rgba(255,255,255,1);}.zfr3Q{color: rgba(33,33,33,1); font-family: Lato, sans-serif; font-size: 11pt; font-weight: 400; line-height: 1.6667; margin-top: 12px;}.qnVSj{color: rgba(33,33,33,1);}.Glwbz{color: rgba(33,33,33,1);}.duRjpb{color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 34pt; font-weight: 300; letter-spacing: 0.5px; line-height: 1.2; margin-top: 30px;}.Ap4VC{margin-bottom: -30px;}.qLrapd{color: rgba(34,110,147,1);}.JYVBee{color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 19pt; font-weight: 400; line-height: 1.4; margin-top: 20px;}.CobnVe{margin-bottom: -20px;}.aHM7ed{color: rgba(34,110,147,1);}.OmQG5e{color: rgba(33,33,33,1); font-family: Lato, sans-serif; font-size: 15pt; font-style: normal; font-weight: 400; line-height: 1.25; margin-top: 16px;}.GV3q8e{margin-bottom: -16px;}.NHD4Gf{color: rgba(33,33,33,1);}.LB7kq .duRjpb{font-size: 64pt; letter-spacing: 2px; line-height: 1; margin-top: 40px;}.LB7kq .JYVBee{font-size: 25pt; font-weight: 300; line-height: 1.1; margin-top: 25px;}@media only screen and (max-width: 479px){.LB7kq .duRjpb{font-size: 40pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.LB7kq .duRjpb{font-size: 53pt;}}@media only screen and (max-width: 479px){.LB7kq .JYVBee{font-size: 19pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.LB7kq .JYVBee{font-size: 
22pt;}}.O13XJf{height: 340px; padding-bottom: 60px; padding-top: 60px;}@media only screen and (min-width: 480px) and (max-width: 767px){.O13XJf{height: 280px; padding-bottom: 40px; padding-top: 40px;}}@media only screen and (max-width: 479px){.O13XJf{height: 250px; padding-bottom: 30px; padding-top: 30px;}}.SBrW1{height: 520px;}@media only screen and (min-width: 480px) and (max-width: 767px){.SBrW1{height: 520px;}}@media only screen and (max-width: 479px){.SBrW1{height: 400px;}}.Wew9ke{fill: rgba(255,255,255,1);}.gk8rDe{height: 180px; padding-bottom: 32px; padding-top: 60px;}.gk8rDe .zfr3Q{color: rgba(0,0,0,1);}.gk8rDe .duRjpb{color: rgba(34,110,147,1); font-size: 45pt; line-height: 1.1;}.gk8rDe .qLrapd{color: rgba(34,110,147,1);}.gk8rDe .JYVBee{color: rgba(34,110,147,1); font-size: 27pt; line-height: 1.35; margin-top: 15px;}.gk8rDe .aHM7ed{color: rgba(34,110,147,1);}.gk8rDe .OmQG5e{color: rgba(33,33,33,1);}.gk8rDe .NHD4Gf{color: rgba(33,33,33,1);}@media only screen and (max-width: 479px){.gk8rDe .duRjpb{font-size: 30pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.gk8rDe .duRjpb{font-size: 38pt;}}@media only screen and (max-width: 479px){.gk8rDe .JYVBee{font-size: 20pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.gk8rDe .JYVBee{font-size: 24pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.gk8rDe{padding-top: 45px;}}@media only screen and (max-width: 479px){.gk8rDe{padding-bottom: 0px; padding-top: 30px;}}.dhtgD{text-decoration: underline;}.JzO0Vc{background-color: rgba(33,33,33,1); font-family: Lato, sans-serif; width: 250px;}@media only screen and (min-width: 1280px){.JzO0Vc{padding-top: 48.5px;}}.TlfmSc{font-family: Lato, sans-serif; font-size: 15pt; font-weight: 300; line-height: 1.333;}.PsKE7e{font-family: Lato, sans-serif; font-size: 12pt;}.IKA38e{line-height: 1.21;}.hDrhEe{padding-bottom: 11.5px; padding-top: 11.5px;}.zDUgLc{opacity: 1;}.QmpIrf{background-color: rgba(30,108,147,1); border-color: rgba(255,255,255,1); color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.xkUom{border-color: rgba(30,108,147,1); color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.xkUom:hover{background-color: rgba(30,108,147,0.1000000015);}.KjwKmc{color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal; line-height: normal;}.KjwKmc:hover{background-color: rgba(30,108,147,0.1000000015);}.lQAHbd .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.lQAHbd .xkUom{border-color: rgba(242,242,242,1); color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.lQAHbd .xkUom:hover{background-color: rgba(255,255,255,0.1000000015);}.lQAHbd .KjwKmc{color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.lQAHbd .KjwKmc:hover{background-color: rgba(255,255,255,0.1000000015);}.lQAHbd .Mt0nFe{border-color: rgba(255,255,255,0.200000003);}.cJgDec .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.cJgDec .xkUom{border-color: rgba(242,242,242,1); color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.cJgDec .xkUom:hover{background-color: 
rgba(255,255,255,0.1000000015);}.cJgDec .KjwKmc{color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.cJgDec .KjwKmc:hover{background-color: rgba(255,255,255,0.1000000015);}.tpmmCb .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.tpmmCb .xkUom{border-color: rgba(30,108,147,1); color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.tpmmCb .xkUom:hover{background-color: rgba(30,108,147,0.1000000015);}.tpmmCb .KjwKmc{color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.tpmmCb .KjwKmc:hover{background-color: rgba(30,108,147,0.1000000015);}.gk8rDe .QmpIrf{background-color: rgba(30,108,147,1); border-color: rgba(255,255,255,1); color: rgba(255,255,255,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.gk8rDe .xkUom{border-color: rgba(30,108,147,1); color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.gk8rDe .xkUom:hover{background-color: rgba(30,108,147,0.1000000015);}.gk8rDe .KjwKmc{color: rgba(30,108,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.gk8rDe .KjwKmc:hover{background-color: rgba(30,108,147,0.1000000015);}.O13XJf .QmpIrf{background-color: rgba(255,255,255,1); border-color: rgba(34,110,147,1); color: rgba(34,110,147,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.O13XJf .xkUom{border-color: rgba(242,242,242,1); color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.O13XJf .xkUom:hover{background-color: rgba(255,255,255,0.1000000015);}.O13XJf .KjwKmc{color: rgba(242,242,242,1); font-family: Lato, sans-serif; font-size: 11pt; line-height: normal;}.O13XJf .KjwKmc:hover{background-color: rgba(255,255,255,0.1000000015);}.Y4CpGd{font-family: Lato, sans-serif; font-size: 11pt;}.CMArNe{background-color: rgba(242,242,242,1);}.LBrwzc .TlfmSc{color: rgba(0,0,0,0.8000000119);}.LBrwzc .YTv4We{color: rgba(0,0,0,0.6399999857);}.LBrwzc .YTv4We.chg4Jd:focus:before{border-color: rgba(0,0,0,0.6399999857); display: block;}.LBrwzc .Mz8gvb{color: rgba(0,0,0,0.6399999857);}.LBrwzc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.LBrwzc .wgxiMe{background-color: rgba(255,255,255,1);}.LBrwzc .qV4dIc{color: rgba(0,0,0,0.6399999857);}.LBrwzc .M9vuGd{color: rgba(0,0,0,0.8000000119); font-weight: bold;}.LBrwzc .Zjiec{color: rgba(0,0,0,0.8000000119);}.LBrwzc .IKA38e{color: rgba(0,0,0,0.6399999857);}.LBrwzc .lhZOrc.IKA38e{color: rgba(0,0,0,0.8000000119); font-weight: bold;}.LBrwzc .j10yRb:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .eBSUbc{color: rgba(0,0,0,0.8000000119);}.LBrwzc .hDrhEe:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .jgXgSe:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .M9vuGd:hover{color: rgba(0,0,0,0.8000000119);}.LBrwzc .zDUgLc{border-bottom-color: rgba(204,204,204,1); border-bottom-width: 1px; border-bottom-style: solid;}.fOU46b .LBrwzc .M9vuGd{color: rgba(0,0,0,0.8000000119);}.fOU46b .LBrwzc .jgXgSe:hover{color: rgba(0,0,0,0.8000000119);}.fOU46b .LBrwzc .zDUgLc{opacity: 1; border-bottom-style: none;}.fOU46b .LBrwzc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.fOU46b .LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.fOU46b .GBy4H .M9vuGd{color: rgba(255,255,255,1);}.fOU46b .GBy4H 
.jgXgSe:hover{color: rgba(255,255,255,1);}.fOU46b .GBy4H .zDUgLc{opacity: 1;}.fOU46b .GBy4H .tCHXDc{color: rgba(255,255,255,0.8700000048);}.fOU46b .GBy4H .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.XeSM4.G9Qloe.fOU46b .LBrwzc .tCHXDc{color: rgba(0,0,0,0.6399999857);}.XeSM4.G9Qloe.fOU46b .LBrwzc .iWs3gf.chg4Jd:focus{background-color: rgba(0,0,0,0.1199999973);}.GBy4H .lhZOrc.IKA38e{color: rgba(255,255,255,1);}.GBy4H .eBSUbc{color: rgba(255,255,255,0.8700000048);}.GBy4H .hDrhEe:hover{color: rgba(255,255,255,1);}.GBy4H .j10yRb:hover{color: rgba(255,255,255,1);}.GBy4H .YTv4We{color: rgba(255,255,255,1);}.GBy4H .YTv4We.chg4Jd:focus:before{border-color: rgba(255,255,255,1); display: block;}.GBy4H .tCHXDc{color: rgba(255,255,255,0.8700000048);}.GBy4H .iWs3gf.chg4Jd:focus{background-color: rgba(255,255,255,0.1199999973);}.GBy4H .jgXgSe:hover{color: rgba(255,255,255,1);}.GBy4H .jgXgSe:hover{color: rgba(255,255,255,1);}.GBy4H .M9vuGd{color: rgba(255,255,255,1);}.GBy4H .M9vuGd:hover{color: rgba(255,255,255,1);}.QcmuFb{padding-left: 20px;}.vDPrib{padding-left: 40px;}.TBDXjd{padding-left: 60px;}.bYeK8e{padding-left: 80px;}.CuqSDe{padding-left: 100px;}.Havqpe{padding-left: 120px;}.JvDrRe{padding-left: 140px;}.o5lrIf{padding-left: 160px;}.yOJW7c{padding-left: 180px;}.rB8cye{padding-left: 200px;}.RuayVd{padding-right: 20px;}.YzcKX{padding-right: 40px;}.reTV0b{padding-right: 60px;}.vSYeUc{padding-right: 80px;}.PxtZIe{padding-right: 100px;}.ahQMed{padding-right: 120px;}.rzhcXb{padding-right: 140px;}.PBhj0b{padding-right: 160px;}.TlN46c{padding-right: 180px;}.GEdNnc{padding-right: 200px;}.TMjjoe{font-family: Lato, sans-serif; font-size: 9pt; line-height: 1.2; margin-top: 0px;}@media only screen and (min-width: 1280px){.yxgWrb{margin-left: 250px;}}@media only screen and (max-width: 479px){.Zjiec{font-size: 15pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.Zjiec{font-size: 17pt;}}@media only screen and (max-width: 479px){.TlfmSc{font-size: 13pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.TlfmSc{font-size: 14pt;}}@media only screen and (max-width: 479px){.PsKE7e{font-size: 12pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.PsKE7e{font-size: 12pt;}}@media only screen and (max-width: 479px){.duRjpb{font-size: 24pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.duRjpb{font-size: 29pt;}}@media only screen and (max-width: 479px){.JYVBee{font-size: 15pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.JYVBee{font-size: 17pt;}}@media only screen and (max-width: 479px){.OmQG5e{font-size: 13pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.OmQG5e{font-size: 14pt;}}@media only screen and (max-width: 479px){.TlfmSc{font-size: 13pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.TlfmSc{font-size: 14pt;}}@media only screen and (max-width: 479px){.PsKE7e{font-size: 12pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.PsKE7e{font-size: 12pt;}}@media only screen and (max-width: 479px){.TMjjoe{font-size: 9pt;}}@media only screen and (min-width: 480px) and (max-width: 767px){.TMjjoe{font-size: 9pt;}}section[id="h.9f2bec207002637_265"] .IFuOkc:before{opacity: 0.6;}section[id="h.9f2bec207002637_314"] .IFuOkc:before{opacity: 0.0;}section[id="h.9f2bec207002637_148"] .IFuOkc:before{opacity: 0.0;}</style><script nonce="8-mTV4AAsQYTPM4ekq5k_g">_at_config = 
[null,"AIzaSyChg3MFqzdi1P5J-YvEyakkSA1yU7HRcDI","897606708560-a63d8ia0t9dhtpdt4i3djab2m42see7o.apps.googleusercontent.com",null,null,null,null,null,null,null,null,null,null,null,"SITES_%s",null,null,null,null,null,null,null,null,null,["AHKXmL3rCJO5gy2OktnAkY8PqdzotcJfY6OG75qxJPAGWUMggphVec715325Z7pDeHI97Ibv69A3",1,"CMr6tqWe-4kDFcs0bwYd-rkGYQ",1732666658241865,[5703839,5704621,5706832,5706836,5707711,5737784,5737800,5738513,5738529,5740798,5740814,5743108,5743124,5747265,5748013,5748029,5752678,5752694,5753313,5753329,5754213,5754229,5755080,5755096,5758807,5758823,5762243,5762259,5764252,5764268,5765535,5765551,5766761,5766777,5773662,5773678,5774331,5774347,5774836,5774852,5776501,5776517,5784931,5784947,5784951,5784967,5791766,5791782,5796457,5796473,14101306,14101502,14101510,14101534,49372435,49372443,49375314,49375322,49472063,49472071,49622823,49622831,49623173,49623181,49643568,49643576,49644015,49644023,49769337,49769345,49822921,49822929,49823164,49823172,49833462,49833470,49842855,49842863,49924706,49924714,50221720,50221728,50266222,50266230,50273528,50273536,50297076,50297084,50297426,50297434,50498907,50498915,50529103,50529111,50561333,50561341,50586962,50586970,70971256,70971264,71035517,71035525,71038255,71038263,71079938,71079946,71085241,71085249,71185170,71185178,71197826,71197834,71238946,71238954,71289146,71289154,71387889,71387897,71429507,71429515,71478200,71478208,71478589,71478597,71502841,71502849,71528597,71528605,71530083,71530091,71544834,71544842,71545513,71545521,71546425,71546433,71560069,71560077,71561541,71561549,71573870,71573878,71642103,71642111,71652840,71652848,71658040,71658048,71659813,71659821,71689860,71689868,71699841,71699849,71720760,71721087,71721095,71733073,71733081,71798420,71798436,71798440,71798456,71849655,71849663,71882106,71882114,71897827,71897835,71960540,71960548,71961126,71961134,94327661,94327669,94333153,94333161,94353368,94353376,94390153,94390161,94413607,94413615,94420737,94420745,94434257,94434265,94435578,94435586,94444292,94444300,94484634,94484642,94489858,94489866,94502654,94502662,94526768,94526776,94545004,94545012,94597639,94597647,94630911,94661802,94661810,94707424,94707432,94784571,94784579,94875009,94875017,94904089,94904097,94929210,94929218,94942490,94942498,95065889,95065897,95086191,95086199,95087186,95087194,95087227,95087235,95112873,95112881,95118551,95118559,95135933,95135941,95234185,95234871,95234879,95251262,95251270,95254920,95254928,95270945,95270953,95314802,95314810,95317975,99237681,99237689,99247596,99247604,99310979,99310987,99338440,99338448,99368792,99368800,99401881,99401889,99402331,99402339,99437441,99437449,99460069,100130662,100130678,101406734,101406742,101442805,101442813,101456452,101456460,101488823,101488831,101489187,101489195,101507186,101507194,101519280,101519288,101606928,101606936,101617516,101617524,101631040,101631048,101705087,101708583,101708591,101771970,101771978,101776366,101776374,101783430,101783446,101875084,101875092,102047762,102047770]],null,null,null,null,0,null,null,null,null,null,null,null,null,null,"https://drive.google.com",null,null,null,null,null,null,null,null,null,0,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,"v2internal","https://docs.google.com",null,null,null,null,null,null,"https://sites.google.com/new/",null,null,null,null,null,0,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,1,"",null,null,null,null,null,null,null,null,null,null,null,null,6,null,null,"https://accounts.google.com/o/oauth2/auth","https://accounts.google.com/o/oauth2/postmessageRelay",null,null,null,null,78,"https://sites.google.com/new/?usp\u003dviewer_footer",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,"https://www.gstatic.com/atari/embeds/83a60601c213b72fb19c1855fb0c5f26/intermediate-frame-minified.html",0,null,"v2beta",null,null,null,null,null,null,4,"https://accounts.google.com/o/oauth2/iframe",null,null,null,null,null,null,"https://608726431-atari-embeds.googleusercontent.com/embeds/16cb204cf3a9d4d223a0a3fd8b0eec5d/inner-frame-minified.html",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,"https://sites.google.com/view/aimlnet-2022/accepted-papers",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,0,null,"",null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,[1732666658242,"editors.sites-viewer-frontend_20241112.02_p1","695977640",null,1,1,""],null,null,null,null,0,null,null,0,null,null,null,null,null,null,null,null,20,500,"https://domains.google.com",null,0,null,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,1,0,1,0,0,0,0,null,null,null,null,null,"https://www.google.com/calendar/embed",null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,0,null,null,null,null,null,null,null,null,null,null,null,null,null,"PROD",0,null,0,null,1]; window.globals = {"enableAnalytics":true,"webPropertyId":"","showDebug":false,"hashedSiteId":"9c24f0a2fdca6269c34898aaffe978811c1ac53268301b818d79cf056e176574","normalizedPath":"view/aimlnet-2022/accepted-papers","pageTitle":"Accepted Papers"}; function gapiLoaded() {if (globals.gapiLoaded == undefined) {globals.gapiLoaded = true;} else {globals.gapiLoaded();}}window.messages = []; window.addEventListener && window.addEventListener('message', function(e) {if (window.messages && e.data && e.data.magic == 'SHIC') {window.messages.push(e);}});</script><script src="https://apis.google.com/js/client.js?onload=gapiLoaded" nonce="8-mTV4AAsQYTPM4ekq5k_g"></script><script nonce="8-mTV4AAsQYTPM4ekq5k_g">(function(){}).call(this); </script><script nonce="8-mTV4AAsQYTPM4ekq5k_g">const imageUrl = 'https:\/\/lh6.googleusercontent.com\/YANZwWKI_7BA1eLGYUcXoclrtIdOm3xmcfMsa_mbQfb7TN4znVkv1LzcTyDosTey2Y2L5my13A3GeUKg-pT1vlo\x3dw16383'; function bgImgLoaded() { if (!globals.headerBgImgLoaded) { globals.headerBgImgLoaded = new Date().getTime(); } else { globals.headerBgImgLoaded(); } } if (imageUrl) { const img = new Image(); img.src = imageUrl; img.onload = 
bgImgLoaded; globals.headerBgImgExists = true; } else { globals.headerBgImgExists = false; } </script></head><body dir="ltr" itemscope itemtype="http://schema.org/WebPage" id="yDmH0d" css="yDmH0d"><div jscontroller="pc62j" jsmodel="iTeaXe" jsaction="rcuQ6b:WYd;GvneHb:og1FDd;vbaUQc:uAM5ec;"><div id="docs-banner-container"><div id="docs-banners"><div id="HB1eCd-mzNpsf-r8s4j-ORHb"></div><div id="HB1eCd-TZk80d-r8s4j-ORHb" aria-live="assertive" aria-atomic="true"></div></div><div class="HB1eCd-Vkfede-NBtyUd-PvRhvb-LwH6nd"></div></div><div jscontroller="X4BaPc" jsaction="rcuQ6b:WYd;o6xM5b:Pg9eo;HuL2Hd:mHeCvf;VMhF5:FFYy5e;sk3Qmb:HI1Mdd;JIbuQc:rSzFEd(z2EeY),aSaF6e(ilzYPe);"><div jscontroller="o1L5Wb" data-sitename="aimlnet-2022" data-search-scope="1" data-universe="1" jsmodel="fNFZH" jsaction="Pe9H6d:cZFEp;WMZaJ:VsGN3;hJluRd:UADL7b;zuqEgd:HI9w0;tr6QDd:Y8aXB;MxH79b:xDkBfb;JIbuQc:SPXMTb(uxAMZ),LjG1Ed(a6mxbb);" jsname="G0jgYd"><div jsname="gYwusb" class="p9b27"></div><div jscontroller="RrXLpc" jsname="XeeWQc" role="banner" jsaction="keydown:uiKYid(OH0EC);rcuQ6b:WYd;zuqEgd:ufqpf;JIbuQc:XfTnxb(lfEfFf),AlTiYc(GeGHKb),AlTiYc(m1xNUe),zZlNMe(pZn8Oc);YqO5N:ELcyfe;"><div jsname="bF1uUb" class="BuY5Fd" jsaction="click:xVuwSc;"></div><div jsname="MVsrn" class="TbNlJb "><div role="button" class="U26fgb mUbCce fKz7Od h3nfre M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="GeGHKb" aria-label="Back to site" aria-disabled="false" tabindex="0" data-tooltip="Back to site" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="V4YR2c" viewBox="0 0 24 24" focusable="false"><path d="M0 0h24v24H0z" fill="none"/><path d="M20 11H7.83l5.59-5.59L12 4l-8 8 8 8 1.41-1.41L7.83 13H20v-2z"/></svg></span></span></div><div class="E2UJ5" jsname="M6JdT"><div class="rFrNMe b7AJhc zKHdkd" jscontroller="pxq3x" jsaction="clickonly:KjsqPd; focus:Jt1EX; blur:fpfTEe; input:Lg5SV" jsshadow jsname="OH0EC" aria-expanded="true"><div class="aCsJod oJeWuf"><div class="aXBtI I0VJ4d Wic03c"><span jsslot class="A37UZe qgcB3c iHd5yb"><div role="button" class="U26fgb mUbCce fKz7Od i3PoXe M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="lfEfFf" aria-label="Search" aria-disabled="false" tabindex="0" data-tooltip="Search" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="vu8Pwe" viewBox="0 0 24 24" focusable="false"><path d="M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/><path d="M0 0h24v24H0z" fill="none"/></svg></span></span></div><div class="EmVfjc SKShhf" data-loadingmessage="Loading…" jscontroller="qAKInc" jsaction="animationend:kWijWc;dyRcpb:dyRcpb" jsname="aZ2wEe"><div class="Cg7hO" aria-live="assertive" 
jsname="vyyg5"></div><div jsname="Hxlbvc" class="xu46lf"><div class="ir3uv uWlRce co39ub"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div><div class="ir3uv GFoASc Cn087"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div><div class="ir3uv WpeOqd hfsr6b"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div><div class="ir3uv rHV3jf EjXFBf"><div class="xq3j6 ERcjC"><div class="X6jHbb GOJTSe"></div></div><div class="HBnAAc"><div class="X6jHbb GOJTSe"></div></div><div class="xq3j6 dj3yTd"><div class="X6jHbb GOJTSe"></div></div></div></div></div><div role="button" class="U26fgb mUbCce fKz7Od JyJRXe M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="m1xNUe" aria-label="Back to site" aria-disabled="false" tabindex="0" data-tooltip="Back to site" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="V4YR2c" viewBox="0 0 24 24" focusable="false"><path d="M0 0h24v24H0z" fill="none"/><path d="M20 11H7.83l5.59-5.59L12 4l-8 8 8 8 1.41-1.41L7.83 13H20v-2z"/></svg></span></span></div></span><div class="Xb9hP"><input type="search" class="whsOnd zHQkBf" jsname="YPqjbf" autocomplete="off" tabindex="0" aria-label="Search this site" value="" aria-disabled="false" autofocus role="combobox" data-initial-value=""/><div jsname="LwH6nd" class="ndJi5d snByac" aria-hidden="true">Search this site</div></div><span jsslot class="A37UZe sxyYjd MQL3Ob"><div role="button" class="U26fgb mUbCce fKz7Od Kk06A M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="pZn8Oc" aria-label="Clear search" aria-disabled="false" tabindex="0" data-tooltip="Clear search" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="fAUEUd" viewBox="0 0 24 24" focusable="false"><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"></path><path d="M0 0h24v24H0z" fill="none"></path></svg></span></span></div></span><div class="i9lrp mIZh1c"></div><div jsname="XmnwAc" class="OabDMe cXrdqd"></div></div></div><div class="LXRPh"><div jsname="ty6ygf" class="ovnfwe Is7Fhb"></div></div></div></div></div></div></div><div jsname="tiN4bf"><style nonce="4b2AbZSLtLCYjvGkxBLgpg">.rrJNTc{opacity: 0;}.bKy5e{pointer-events: none; position: absolute; top: 0;}</style><div class="bKy5e"><div class="rrJNTc" tabindex="-1"><div class="VfPpkd-dgl2Hf-ppHlrf-sM5MNb" data-is-touch-wrapper='true'><button class="VfPpkd-LgbsSe VfPpkd-LgbsSe-OWXEXe-dgl2Hf LjDxcd 
XhPA0b LQeN7 WsSUlf jz7fPb" jscontroller="soHxf" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc; touchcancel:JMtRjd; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;mlnRJb:fLiPzd;" data-idom-class="LjDxcd XhPA0b LQeN7 WsSUlf jz7fPb" jsname="z2EeY" tabindex="0"><div class="VfPpkd-Jh9lGc"></div><div class="VfPpkd-J1Ukfc-LhBDec"></div><div class="VfPpkd-RLmnJb"></div><span jsname="V67aGc" class="VfPpkd-vQzf8d">Skip to main content</span></button></div><div class="VfPpkd-dgl2Hf-ppHlrf-sM5MNb" data-is-touch-wrapper='true'><button class="VfPpkd-LgbsSe VfPpkd-LgbsSe-OWXEXe-dgl2Hf LjDxcd XhPA0b LQeN7 WsSUlf br90J" jscontroller="soHxf" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc; touchcancel:JMtRjd; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;mlnRJb:fLiPzd;" data-idom-class="LjDxcd XhPA0b LQeN7 WsSUlf br90J" jsname="ilzYPe" tabindex="0"><div class="VfPpkd-Jh9lGc"></div><div class="VfPpkd-J1Ukfc-LhBDec"></div><div class="VfPpkd-RLmnJb"></div><span jsname="V67aGc" class="VfPpkd-vQzf8d">Skip to navigation</span></button></div></div></div><div class="M63kCb N63NQ"></div><div class="QZ3zWd"><div class="fktJzd AKpWA fOU46b G9Qloe XeSM4 XxIgdb" jsname="UzWXSb" data-uses-custom-theme="false" data-legacy-theme-name="QualityBasics" data-legacy-theme-font-kit="Light" data-legacy-theme-color-kit="Blue" jscontroller="Md9ENb" jsaction="gsiSmd:Ffcznf;yj5fUd:cpPetb;HNXL3:q0Vyke;e2SXKd:IPDu5e;BdXpgd:nhk7K;rcuQ6b:WYd;"><header id="atIdViewHeader"><div class="BbxBP HP6J1d K5Zlne" jsname="WA9qLc" jscontroller="RQOkef" jsaction="rcuQ6b:JdcaS;MxH79b:JdcaS;VbOlFf:ywL4Jf;FaOgy:ywL4Jf; keydown:Hq2uPe; wheel:Ut4Ahc;" data-top-navigation="true" data-is-preview="false"><div class="DXsoRd YTv4We oNsfjf" role="button" tabindex="0" jsaction="click:LUvzV" jsname="z4Tpl" id="s9iPrd" aria-haspopup="true" aria-controls="yuynLe" aria-expanded="false"><svg class="wFCWne" viewBox="0 0 24 24" stroke="currentColor" jsname="B1n9ub" focusable="false"><g transform="translate(12,12)"><path class="hlJH0" d="M-9 -5 L9 -5" fill="none" stroke-width="2"/><path class="HBu6N" d="M-9 0 L9 0" fill="none" stroke-width="2"/><path class="cLAGQe" d="M-9 5 L9 5" fill="none" stroke-width="2"/></g></svg></div><nav class="JzO0Vc" jsname="ihoMLd" role="navigation" tabindex="-1" id="yuynLe" jsaction="transitionend:UD2r5"><a class="XMyrgf" href="/view/aimlnet-2022/home"><img src="https://lh6.googleusercontent.com/kfETP3k0EbtQQVwXD0apgaA7Kejmii_mIMheofjc3WjkuudjB3aOHpmVE2Q6Ia2v3nNInxZISBrv7662kvw0gsY=w16383" class="r9CsCb" role="img" aria-label="Site home"></a><ul class="jYxBte Fpy8Db" tabindex="-1"><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/home" data-url="/view/aimlnet-2022/home" data-type="1" data-level="1">Home</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/paper-submission" data-url="/view/aimlnet-2022/paper-submission" data-type="1" data-level="1">Paper 
Submission</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/program-committee" data-url="/view/aimlnet-2022/program-committee" data-type="1" data-level="1">Program Committee</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf lhZOrc" aria-current="true"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-selected="true" href="/view/aimlnet-2022/accepted-papers" data-url="/view/aimlnet-2022/accepted-papers" data-type="1" data-level="1">Accepted Papers</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/venue" data-url="/view/aimlnet-2022/venue" data-type="1" data-level="1">Venue</a></div></div></li><li jsname="ibnC6b" data-nav-level="1"><div class="PsKE7e r8s4j-R6PoUb IKA38e baH5ib oNsfjf"><div class="I35ICb" jsaction="keydown:mPuKz(QwLHlb); click:vHQTA(QwLHlb);"><a class="aJHbb dk90Ob hDrhEe HlqNPb" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/contact" data-url="/view/aimlnet-2022/contact" data-type="1" data-level="1">Contact</a></div></div></li></ul></nav><div class="VLoccc K5Zlne QDWEj U8eYrb" jsname="rtFGi"><div class="Pvc6xe"><div jsname="I8J07e" class="TlfmSc YSH9J"><a class="GAuSPc" jsname="jIujaf" href="/view/aimlnet-2022/home"><img src="https://lh6.googleusercontent.com/kfETP3k0EbtQQVwXD0apgaA7Kejmii_mIMheofjc3WjkuudjB3aOHpmVE2Q6Ia2v3nNInxZISBrv7662kvw0gsY=w16383" class="lzy1Td" role="img" aria-label="Site home" jsname="SwcDWb"></a>&nbsp;</div><nav class="plFg0c" jscontroller="HXO1uc" jsaction="rcuQ6b:rcuQ6b;MxH79b:CfS0pe;" id="WDxLfe" data-is-preview="false" style="visibility: hidden;" role="navigation" tabindex="-1"><ul jsname="waIgnc" class="K1Ci7d oXBWEc jYxBte"><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/home" data-url="/view/aimlnet-2022/home" data-type="1" data-level="1">Home</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/paper-submission" data-url="/view/aimlnet-2022/paper-submission" data-type="1" data-level="1">Paper Submission</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" 
role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/program-committee" data-url="/view/aimlnet-2022/program-committee" data-type="1" data-level="1">Program Committee</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J M9vuGd" aria-current="true"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-selected="true" href="/view/aimlnet-2022/accepted-papers" data-url="/view/aimlnet-2022/accepted-papers" data-type="1" data-level="1">Accepted Papers</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/venue" data-url="/view/aimlnet-2022/venue" data-type="1" data-level="1">Venue</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/contact" data-url="/view/aimlnet-2022/contact" data-type="1" data-level="1">Contact</a></div></div><div class="rgLkl"></div></li><li jsname="ibnC6b" data-nav-level="1" class="VsJjtf ZmrVpf oXBWEc" more-menu-item jsaction="mouseenter:Vx8Jlb; mouseleave:ysDRUd"><div class="PsKE7e qV4dIc Qrrb5 YSH9J"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb dk90Ob jgXgSe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-expanded="false" aria-haspopup="true" data-level="1">More</a><div class="mBHtvb u5fiyc" role="presentation" title="Expand/Collapse" jsaction="click:oESVTe" jsname="ix0Hvc"><svg class="dvmRw" viewBox="0 0 24 24" stroke="currentColor" jsname="HIH2V" focusable="false"><g transform="translate(9.7,12) rotate(45)"><path class="K4B8Y" d="M-4.2 0 L4.2 0" stroke-width="2"/></g><g transform="translate(14.3,12) rotate(-45)"><path class="MrYMx" d="M-4.2 0 L4.2 0" stroke-width="2"/></g></svg></div></div></div><div class="oGuwee eWDljc RPRy1e Mkt3Tc" style="display:none;" jsname="QXE97" jsaction="transitionend:SJBdh" role="group"><ul class="VcS63b"><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/home" data-url="/view/aimlnet-2022/home" data-type="1" data-in-more-submenu="true" data-level="2">Home</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/paper-submission" 
data-url="/view/aimlnet-2022/paper-submission" data-type="1" data-in-more-submenu="true" data-level="2">Paper Submission</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/program-committee" data-url="/view/aimlnet-2022/program-committee" data-type="1" data-in-more-submenu="true" data-level="2">Program Committee</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf lhZOrc" aria-current="true"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" aria-selected="true" href="/view/aimlnet-2022/accepted-papers" data-url="/view/aimlnet-2022/accepted-papers" data-type="1" data-in-more-submenu="true" data-level="2">Accepted Papers</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/venue" data-url="/view/aimlnet-2022/venue" data-type="1" data-in-more-submenu="true" data-level="2">Venue</a></div></div></li><li jsname="ibnC6b" data-nav-level="2" class="ijMPi ZmrVpf" in-more-item><div class="PsKE7e IKA38e oNsfjf"><div class="I35ICb" jsaction="click:vHQTA(QwLHlb); keydown:mPuKz(QwLHlb);"><a class="aJHbb hDrhEe HlqNPb" jscontroller="yUHiM" jsaction="rcuQ6b:WYd;" jsname="QwLHlb" role="link" tabindex="0" data-navtype="1" href="/view/aimlnet-2022/contact" data-url="/view/aimlnet-2022/contact" data-type="1" data-in-more-submenu="true" data-level="2">Contact</a></div></div></li></ul></div></li></ul></nav><div jscontroller="gK4msf" class="RBEWZc" jsname="h04Zod" jsaction="rcuQ6b:WYd;JIbuQc:AT95Ub;VbOlFf:HgE5D;FaOgy:HgE5D;MxH79b:JdcaS;" data-side-navigation="false"><div role="button" class="U26fgb mUbCce fKz7Od iWs3gf Wdnjke M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="R9oOZd" aria-label="Open search bar" aria-disabled="false" tabindex="0" data-tooltip="Open search bar" aria-expanded="false" data-tooltip-vertical-offset="-12" data-tooltip-horizontal-offset="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="vu8Pwe tCHXDc YSH9J" viewBox="0 0 24 24" focusable="false"><path d="M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/><path d="M0 0h24v24H0z" fill="none"/></svg></span></span></div></div></div><div jsname="mADGA" class="zDUgLc"></div></div><div class="TxnWlb" jsname="BDdyze" jsaction="click:LUvzV"></div></div></header><div role="main" tabindex="-1" class="UtePc RCETm" 
dir="ltr"><section id="h.9f2bec207002637_265" class="yaqOZd LB7kq cJgDec nyKByd O13XJf KEFykf" style=""><div class="Nu95r"><div class="IFuOkc" style="background-size: cover; background-position: center center; background-image: url(https://lh6.googleusercontent.com/YANZwWKI_7BA1eLGYUcXoclrtIdOm3xmcfMsa_mbQfb7TN4znVkv1LzcTyDosTey2Y2L5my13A3GeUKg-pT1vlo=w16383);" jsname="LQX2Vd"></div></div><div class="mYVXT"><div class="LS81yb VICjCf j5pSsc db35Fc" tabindex="-1"><div class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd purZT-AhqUyc-II5mzb ZcASvf-AhqUyc-II5mzb pSzOP-AhqUyc-qWD73c Ktthjf-AhqUyc-qWD73c JNdkSc SQVYQc"><div class="JNdkSc-SmKAyb LkDMRd"><div class="" jscontroller="sGwD4d" jsaction="zXBUYb:zTPCnb;zQF9Uc:Qxe3nd;" jsname="F57UId"><div class="oKdM2c ZZyype Kzv0Me"><div id="h.9f2bec207002637_268" class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd jXK9ad D2fZ2 zu5uec OjCsFc dmUFtb wHaque g5GTcb JYTMs"><div class="jXK9ad-SmKAyb"><div class="tyJCtd mGzaTb Depvyb baZpAe lkHyyc"><h3 id="h.601bovw9r1um" dir="ltr" class="CDt4Ke zfr3Q OmQG5e" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 4pt; margin-top: 14pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: center;"><span class=" jgG6ef" style="color: #ffffff; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-variant: normal; font-weight: normal; vertical-align: baseline;">2</span><sup style="color: #ffffff; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 10.799999999999999pt; font-variant: normal; font-weight: normal;">nd</sup><span class=" jgG6ef" style="color: #ffffff; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-variant: normal; font-weight: normal; vertical-align: baseline;"> International conference on AI, Machine Learning in Communications and Networks (AIMLNET 2022)</span></h3><p id="h.y2ua2lkmrnv5" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: center;"><span style="color: #ffffff; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 14pt; font-variant: normal; font-weight: normal; vertical-align: baseline;">October 29 ~ 30, 2022, Vienna, Austria</span></p></div></div></div></div></div></div></div></div></div><div class="DnLU4" jsaction="JIbuQc:v5IJLd(ipHvib);"><div role="button" class="U26fgb mUbCce fKz7Od HqAAld Wew9ke M9Bg4d" jscontroller="VXdfxd" jsaction="click:cOuCgd; mousedown:UX7yZ; mouseup:lbsD7e; mouseenter:tfO1Yc; mouseleave:JywGue; focus:AHmuwe; blur:O22p3e; contextmenu:mg9Pef;touchstart:p6p2H; touchmove:FwuNnf; touchend:yfqBxc(preventDefault=true); touchcancel:JMtRjd;" jsshadow jsname="ipHvib" aria-label="Scroll down" aria-disabled="false" tabindex="0"><div class="VTBa7b MbhUzd" jsname="ksKsZd"></div><span jsslot class="xjKiLb"><span class="Ce1Y1c" style="top: -12px"><svg class="XE8yyf" viewBox="0 0 24 24" focusable="false"><path d="M7.41 7.84L12 12.42l4.59-4.58L18 9.25l-6 6-6-6z"/><path d="M0-.75h24v24H0z" fill="none"/></svg></span></span></div></div></section><section id="h.9f2bec207002637_306" class="yaqOZd" style=""><div class="IFuOkc"></div><div class="mYVXT"><div class="LS81yb VICjCf j5pSsc db35Fc" tabindex="-1"><div class="hJDwNd-AhqUyc-uQSCkd Ft7HRd-AhqUyc-uQSCkd purZT-AhqUyc-II5mzb 
Accepted Papers

Analysis and Prediction of Fine Dust for the Smart Farm

Hong-Jin Park, Department of Computer Science and Engineering, SangJi University, WonJu, Korea

ABSTRACT

Fine dust, a class 1 carcinogen like asbestos and benzene, is the cause of various diseases, and the spread of ultra-fine dust is one of the important contributors to the spread of the coronavirus. This paper analyzes and predicts fine dust and ultra-fine dust from 2015 to 2019 based on weather data for Seoul, such as average temperature, precipitation, and average wind speed, and atmospheric environment data such as SO2, NO2, and O3. Linear regression, SVM, and ensemble machine learning models are compared for fine dust prediction after analyzing the status of fine dust and ultra-fine dust pollution by season and month. In addition, the important features (attributes) that affect the generation of fine dust and ultra-fine dust are identified. Ultra-fine dust pollution was highest in March and lowest from August to September. Among the meteorological data, average temperature has the most influence on ultra-fine dust, and across the combined meteorological and atmospheric environment data, NO2 has the greatest effect on ultra-fine dust generation.

KEYWORDS

Fine Dust, Ultra-Fine Dust, Machine Learning, Smart Farm.
Induction of Knowledge, Attitude and Practice (KAP) of People Towards Covid-19 from Twitter Data: A Comprehensive Model based on Opinion Mining and Deep Learning

Parvin Reisinezhad and Mostafa Fakhrahmad, Department of Computer Science & Engineering & IT, Shiraz University, Shiraz, Iran

ABSTRACT

Since the outbreak of the coronavirus disease 2019 (COVID-19) pandemic, a great number of questionnaire-based studies have been administered to study populations to evaluate public knowledge, attitudes, and practices (KAP). This research aims instead to use social media data and text-processing approaches to obtain comparable results without questionnaires. For public knowledge and practice, we labeled around a thousand randomly sampled tweets according to the hygiene guidelines of the World Health Organization, where each tweet can match zero or several guideline items, and achieved an inter-annotator agreement of 87% measured by Krippendorff's Alpha coefficient. For public attitudes, we similarly categorized 1,000 tweets as positive, negative, or neutral, reaching a Krippendorff's Alpha agreement of 95% between labelers. We evaluated XLNet and BERT as base models against several machine learning and state-of-the-art deep learning approaches, and XLNet proved the most effective. The use of social network data can aid truth discovery in terms of "self-presentation" and higher self-awareness, exploring the public's points of interest while reducing the response negligence seen in surveys.

KEYWORDS

Text Mining, Sentiment Analysis, Deep Learning, KAP, Cognitive Science.
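The XLNet/BERT comparison above follows the standard sequence-classification pattern; the sketch below shows that pattern with Hugging Face Transformers. The checkpoints are the public base models with randomly initialized 3-way heads (fine-tuning on the labeled tweets is omitted), so the printed scores are placeholders only, not the authors' results.

```python
# Illustrative sketch (assumed Hugging Face setup, not the authors' models):
# score tweets with XLNet and BERT sequence-classification heads for a
# 3-way attitude label (positive / neutral / negative).
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tweets = ["Wearing a mask in crowded places is just common sense.",
          "I refuse to change my habits for this virus."]

for checkpoint in ("xlnet-base-cased", "bert-base-uncased"):  # base checkpoints; fine-tuning omitted
    tok = AutoTokenizer.from_pretrained(checkpoint)
    model = AutoModelForSequenceClassification.from_pretrained(checkpoint, num_labels=3)
    enc = tok(tweets, padding=True, truncation=True, return_tensors="pt")
    with torch.no_grad():
        logits = model(**enc).logits
    print(checkpoint, logits.softmax(dim=-1).tolist())
```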
Towards Devising a Fund Management System using Blockchain

Nibula Bente Rashid, Joyeeta Saha, Raonak Islam Prova, Nowshin Tasfia, Md. Nazrul Huda Shanto and Jannatun Noor, School of Data and Sciences, BRAC University, Dhaka, Bangladesh

ABSTRACT

State government operations comprise a large number of transactions for different processes that must be carried out across the state, including new projects, maintenance and repairs, public employee compensation, and agricultural schemes. Low-level corruption, which is often difficult to trace and hinders state growth, is a major challenge for the top administration. Technology can be used efficiently to eradicate corruption and bring transparency, and an important step in exterminating corruption is to keep track of all the financial transactions of an ongoing project. Our research uses blockchain technology to track fund management and ensure the transparency of any financial statement. We propose a gateway where all transaction records are updated in the system and visible to all stakeholders. We identify research gaps in the literature and focus on including government funds and local currency usage. We propose a funding model that attains two sub-goals: designing a fund management methodology in which authorized individuals can receive and withdraw allocated funds in cryptocurrency, and evaluating a smart contract that handles the funds and provides transparency and tracking.

KEYWORDS

Blockchain, Ethereum, Smart contract, Government Funding.
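To make the tamper-evidence idea behind such a fund tracker concrete, here is a minimal hash-chained ledger in plain Python. It is only a conceptual stand-in: the paper targets Ethereum smart contracts, and nothing below is the authors' implementation.

```python
# Conceptual sketch only: a hash-chained ledger of fund transactions, illustrating
# the tamper-evidence idea behind blockchain-based fund tracking. The paper itself
# targets Ethereum smart contracts; this is plain Python for illustration.
import hashlib, json, time

class FundLedger:
    def __init__(self):
        self.blocks = [{"index": 0, "prev": "0" * 64, "tx": "genesis", "ts": time.time()}]

    def _hash(self, block):
        return hashlib.sha256(json.dumps(block, sort_keys=True).encode()).hexdigest()

    def add_transaction(self, payer, payee, amount):
        prev = self._hash(self.blocks[-1])
        self.blocks.append({"index": len(self.blocks), "prev": prev,
                            "tx": {"payer": payer, "payee": payee, "amount": amount},
                            "ts": time.time()})

    def verify(self):
        # Every block must reference the hash of the previous block.
        return all(b["prev"] == self._hash(self.blocks[i])
                   for i, b in enumerate(self.blocks[1:]))

ledger = FundLedger()
ledger.add_transaction("state_treasury", "road_project_42", 100000)  # hypothetical parties
print("ledger intact:", ledger.verify())
```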
F-LOW: A Promising Countermeasure against DDoS Attacks based on Split Sketch and PCA

Fei Wang, Zhenxing Li* and Xiaofeng Wang, School of Computer, National University of Defense Technology, Changsha, China

ABSTRACT

Distributed Denial of Service (DDoS) is the Achilles' heel of cloud security. This paper therefore focuses on detecting such attacks and, more importantly, on victim identification to support attack reaction. We present a collaborative system called F-LOW. Profiting from a bitwise-based hash function, split sketch, and lightweight IP reconstruction, F-LOW overcomes the shortcomings of principal component analysis (PCA) and regular sketches. Outperforming previous work, our system satisfies all four "LOW" properties of a promising DDoS countermeasure: low profile, low dimension, low overhead and low transmission. Through simulation and theoretical analysis, we demonstrate these properties and the remarkable efficacy of our approach in DDoS mitigation.

KEYWORDS

DDoS detection, victim identification, principal component analysis, split sketch, bitwise-based hash function.
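Sketch-plus-PCA detection can be pictured as follows: hash per-destination packet counts into a small sketch, learn a normal-traffic subspace with PCA, and flag time bins with a large residual. The toy below runs on simulated traffic and is not the F-LOW system itself; its bitwise hashing, split sketch, and IP reconstruction are not reproduced.

```python
# Illustrative sketch + PCA combination (not the F-LOW system): hash
# per-destination packet counts into a small sketch, then use the PCA
# reconstruction residual to flag time bins that deviate from normal traffic.
import numpy as np
from sklearn.decomposition import PCA

rng = np.random.default_rng(0)
BUCKETS = 32

def sketch_counts(dst_ips, counts):
    """Collapse per-destination counts into BUCKETS hashed buckets."""
    row = np.zeros(BUCKETS)
    for ip, c in zip(dst_ips, counts):
        row[hash(ip) % BUCKETS] += c
    return row

# Simulated traffic: 99 normal time bins plus 1 bin with a flooded victim.
ips = [f"10.0.0.{i}" for i in range(200)]
bins = [sketch_counts(ips, rng.poisson(20, len(ips))) for _ in range(99)]
attack = rng.poisson(20, len(ips)); attack[7] += 5000          # victim 10.0.0.7 flooded
bins.append(sketch_counts(ips, attack))
X = np.array(bins)

pca = PCA(n_components=4).fit(X[:99])                           # normal-traffic subspace
residual = np.linalg.norm(X - pca.inverse_transform(pca.transform(X)), axis=1)
print("most anomalous time bin:", int(residual.argmax()))       # expect the last bin
```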
Visualizing Data with Augmented Reality in Smart Environment Based IoT

Abdessamad Badouch and Salah Eddine Krit, Polydisciplinary Faculty of Ouarzazate, Ibn Zohr University, Agadir, Morocco

ABSTRACT

Augmented Reality (AR) and the Internet of Things (IoT) are trending technologies that have gained popularity in many fields; using AR to visualize data coming from IoT devices is one such useful application. In this paper, we propose a system that combines the two technologies to build an energy monitoring system. The tools and technologies used to build this system, and how they work together to deliver the desired results, are described in detail. Finally, the paper discusses the prospects of these technologies and the challenges facing their development.

KEYWORDS

Augmented Reality, Internet of Things, visualization, sensors, data.
baseline;">Wenxin Tian, Graduate School of Information Sciences and Arts, Toyo University, Kawagoe, Saitama, Japan, Dept. of Information Sciences and Arts, Toyo University, Kawagoe, Saitama, Japan</span></p><p id="h.5gulkduv1tk9" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Cartoons are an important art style, which not only has a unique drawing ef ect but also reflects thecharacter itself, which is gradually loved by people. With the development of image processingtechnology, peoples research on image research is no longer limited to image recognition, target detection, and tracking, but also images In this paper, we use deep learning based image processingtogenerate cartoon caricatures of human faces. Therefore, this paper investigates the use of deep learning- based methods to learn face features and convert image styles while preserving the original content features, to automatically generate natural cartoon avatars. In this paper, we study a face cartoongeneration method based on content invariance. 
In the task of image style conversion, the content is fusedwith dif erent style features based on the invariance of content information, to achieve the styleconversion.</span></p><p id="h.wv9a0ni13q71" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Deep learning, CNN, Style transfer, Cartoon style.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.eevkko6uzhvd" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; vertical-align: baseline;"><strong>Tensor-based Multi-Modality Feature Selection and Regression for Alzheimer’s Disease Diagnosis</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Jun Yu</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Zhaoming Kong</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Liang Zhan</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Li Shen</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; 
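The content-invariance idea maps directly onto CycleGAN's cycle-consistency term. The following PyTorch fragment is a minimal sketch of that single term with tiny placeholder generators; a real CycleGAN also needs ResNet-style generators, discriminators, and adversarial losses.

```python
# Minimal PyTorch sketch of CycleGAN's cycle-consistency idea (content preserved
# across photo -> cartoon -> photo). Generators here are tiny placeholders.
import torch
import torch.nn as nn
import torch.nn.functional as F

def tiny_generator():
    return nn.Sequential(nn.Conv2d(3, 16, 3, padding=1), nn.ReLU(),
                         nn.Conv2d(16, 3, 3, padding=1))

G_photo2toon = tiny_generator()   # photo   -> cartoon
G_toon2photo = tiny_generator()   # cartoon -> photo

photo = torch.rand(4, 3, 64, 64)  # a batch of face photos (random stand-ins)
toon = G_photo2toon(photo)
reconstructed = G_toon2photo(toon)

# Cycle-consistency loss: the round trip should give back the original content.
cycle_loss = F.l1_loss(reconstructed, photo)
cycle_loss.backward()             # gradients flow into both generators
print(float(cycle_loss))
```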
font-weight: normal;">3</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Lifang He</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Computer Science and Engineering, Lehigh University, Bethlehem,Pennsylvania, USA, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Electrical and Computer Engineering, University of Pittsburgh,Pittsburgh, Pennsylvania, USA, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Biostatistics, Epidemiology and Informatics, The Perelman School of Medicine, University of Pennsylvania, Philadelphia, Pennsylvania, USA</span></p><p id="h.apcr21sknd5s" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The assessment of Alzheimers Disease (AD) and Mild Cognitive Impairment (MCI) associated with brain changes remains a challenging task. Recent studies have demonstrated that combination of multi-modality imaging techniques can better reflect pathological characteristics and contribute to more accurate diagnosis of AD and MCI. In this paper, we propose a novel tensor-based multi-modality feature selection and regression method for diagnosis and biomarker identification of AD and MCI from normal controls. Specifically, we leverage the tensor structure to exploit high-level correlation information inherent in the multi-modality data, and investigate tensor-level sparsity in the multilinear regression model. 
We present the practical advantages of our method for the analysis of ADNI data using three imaging modalities (VBMMRI, FDG-PET and AV45-PET) with clinical parameters of disease severity and cognitive scores. The experimental results demonstrate the superior performance of our proposed method against the state-ofthe- art for the disease diagnosis and the identification of disease-specific regions and modality-related differences. The code for this work is publicly available at https://github.com/junfish/BIOS22.</span></p><p id="h.aqgobzd88vn6" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Alzheimers disease, multi-modality imaging, brain network, tensor, feature selection, regression.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.s3vcjeg0f0pd" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; vertical-align: baseline;"><strong>Machine Learning GUI based for Detecting Alzheimer’s</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Fatema Nafa, Evelyn RodriguezArgueta, Annie Dequit and Changqing Chen, Salem State University, Salem, MA</span></p><p id="h.tuagi1t3xpjw" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; 
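To show the shape of the data such a model consumes, here is a deliberately simplified stand-in: the three modalities are stacked into a subjects x ROIs x modalities tensor, unfolded, and fed to an ordinary Lasso regression against a synthetic cognitive score. This does not reproduce the paper's tensor-level sparsity or multilinear regression; it only illustrates the multi-modality layout and the idea of sparse feature selection.

```python
# Highly simplified stand-in (NOT the paper's tensor method): stack multi-modality
# ROI features into a subjects x ROIs x modalities tensor, unfold it, and fit a
# sparse regression against a cognitive score. All data below is synthetic.
import numpy as np
from sklearn.linear_model import Lasso

rng = np.random.default_rng(0)
n_subjects, n_rois, n_modalities = 120, 90, 3      # e.g. VBM-MRI, FDG-PET, AV45-PET
X = rng.normal(size=(n_subjects, n_rois, n_modalities))
w_true = np.zeros((n_rois, n_modalities)); w_true[:5, 1] = 1.0   # a few ROIs of one modality matter
y = X.reshape(n_subjects, -1) @ w_true.ravel() + 0.1 * rng.normal(size=n_subjects)

model = Lasso(alpha=0.05).fit(X.reshape(n_subjects, -1), y)
coef = model.coef_.reshape(n_rois, n_modalities)
print("selected (ROI, modality) pairs:", list(zip(*np.nonzero(coef)))[:10])
```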
Machine Learning GUI based for Detecting Alzheimer's

Fatema Nafa, Evelyn RodriguezArgueta, Annie Dequit and Changqing Chen, Salem State University, Salem, MA

ABSTRACT

Alzheimer's disease (AD), a kind of dementia, is marked by progressive cognitive and behavioral problems that appear in middle or late life. Alzheimer's disease must be detected early in order to create more effective therapies. Dr. Alois Alzheimer was the first doctor to notice an unusual state of change in the brains of his deceased patients with mental illness, which marked the start of Alzheimer's research. Machine learning (ML) techniques nowadays employ a variety of probabilistic and optimization strategies to allow computers to learn from vast and complex datasets. Because of the limited amount of labeled data and the prevalence of outliers in current datasets, accurate dementia prediction is extremely difficult. In this research, we propose a sustainable framework for dementia prediction based on ML techniques such as Support Vector Machine, Decision Tree, AdaBoost, Random Forest, and XGBoost. All experiments in this work were conducted under the same experimental conditions using the longitudinal MRI dataset.

KEYWORDS

Machine learning, Alzheimer's disease, Feature selection, Biomechanical parameters.
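The "same experimental conditions" requirement is easy to encode as a shared cross-validation harness. The sketch below evaluates the five listed classifiers with an identical StratifiedKFold split; the file name, column names, and the use of the xgboost package are assumptions for illustration, not the authors' pipeline.

```python
# Sketch of the "same experimental conditions" idea: evaluate every classifier
# with the identical cross-validation split and metric. File and column names
# are assumed placeholders for a longitudinal MRI table.
import pandas as pd
from sklearn.model_selection import StratifiedKFold, cross_val_score
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
from xgboost import XGBClassifier

df = pd.read_csv("oasis_longitudinal.csv")           # assumed input table
X = df[["Age", "EDUC", "MMSE", "eTIV", "nWBV", "ASF"]].fillna(df.median(numeric_only=True))
y = (df["Group"] == "Demented").astype(int)

cv = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)   # shared conditions
models = {
    "SVM": SVC(),
    "DecisionTree": DecisionTreeClassifier(random_state=42),
    "AdaBoost": AdaBoostClassifier(random_state=42),
    "RandomForest": RandomForestClassifier(random_state=42),
    "XGBoost": XGBClassifier(eval_metric="logloss"),
}
for name, model in models.items():
    scores = cross_val_score(model, X, y, cv=cv, scoring="accuracy")
    print(f"{name:12s} accuracy = {scores.mean():.3f} +/- {scores.std():.3f}")
```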
An Approach using Machine Learning Model for Breast Cancer Prediction

Fatema Nafa, Enoc Gonzalez and Gurpreet Kaur, Department of Computer Science, Salem State University, Salem, MA

ABSTRACT

Breast cancer is one of the most common diseases causing the death of many women around the world, so early detection is required to help decrease breast cancer mortality rates and save patients' lives. Machine learning provides strong support for detecting breast cancer at an early stage, since late-detected disease often cannot be cured and places great strain on the health system. In this paper, models are built for breast cancer prediction using Gaussian Naive Bayes (GNB), the K-Nearest Neighbours classifier, the Support Vector Classifier (SVC), and the Decision Tree Classifier (CART), and a comparative study of the four models is presented. Experimental analysis is carried out on the Wisconsin Breast Cancer dataset to evaluate the performance of the models. The computation of the models is simple, enabling an efficient prediction process. The best overall accuracy for breast cancer detection, 94%, is achieved using Gaussian Naive Bayes.

KEYWORDS

Machine Learning, Breast Cancer, representation learning, gene embeddings.
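A minimal version of this four-model comparison can be run on scikit-learn's built-in copy of the Wisconsin Breast Cancer data. The split and default hyperparameters below are assumptions, so the printed accuracies will not match the paper's 94% exactly.

```python
# Sketch of the four-model comparison on the Wisconsin Breast Cancer data
# (scikit-learn's built-in copy); the paper's exact preprocessing and
# hyperparameters are not specified here.
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score

X, y = load_breast_cancer(return_X_y=True)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.25, random_state=1)

models = {
    "GNB": GaussianNB(),
    "KNN": KNeighborsClassifier(),
    "SVC": SVC(),
    "CART": DecisionTreeClassifier(random_state=1),
}
for name, model in models.items():
    model.fit(X_tr, y_tr)
    print(name, "accuracy:", round(accuracy_score(y_te, model.predict(X_te)), 3))
```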
Machine Learning based to Predict B-Cell Epitope Region Utilizing Protein Features

Fatema Nafa and Ryan Kanoff, Department of Computer Science, Salem State University, Salem

ABSTRACT

Considering the current state of the Covid-19 pandemic, vaccine research and production are more important than ever. Antibodies recognize epitopes, which are immunogenic regions of an antigen, in a very specific manner to trigger an immune response. It is extremely difficult to predict such locations, yet they have substantial implications for complex humoral immunogenicity pathways. This paper presents a machine learning epitope prediction model: it builds several models to test the accuracy of B-cell epitope prediction based solely on protein features. The goal is to establish a quantitative comparison of the accuracy of three machine learning models: XGBoost, CatBoost, and LightGBM. Our results show similar accuracy for the XGBoost and LightGBM models, with the CatBoost model achieving the highest accuracy of 82%. Though this accuracy is not high enough to be considered reliable, it does warrant further research on the subject.

KEYWORDS

machine learning models, data exploratory techniques, B-cell epitope prediction.
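All three libraries expose a scikit-learn-style fit/predict interface, so the comparison reduces to a loop like the one below. The input file and protein-feature column names are placeholders, not the authors' data dictionary.

```python
# Sketch of comparing XGBoost, CatBoost and LightGBM on a B-cell epitope table;
# the file name and feature columns are placeholders for protein-derived features.
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from xgboost import XGBClassifier
from catboost import CatBoostClassifier
from lightgbm import LGBMClassifier

df = pd.read_csv("bcell_epitopes.csv")                        # assumed input table
X = df[["chou_fasman", "emini", "kolaskar_tongaonkar", "parker",
        "isoelectric_point", "hydrophobicity", "stability"]]
y = df["target"]                                              # 1 = epitope region
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=7)

models = {
    "XGBoost": XGBClassifier(eval_metric="logloss"),
    "CatBoost": CatBoostClassifier(verbose=0),
    "LightGBM": LGBMClassifier(),
}
for name, model in models.items():
    model.fit(X_tr, y_tr)
    print(name, "accuracy:", round(accuracy_score(y_te, model.predict(X_te)), 3))
```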
style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Yu Sun</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Oaks Christian School, 31749 la tienda rd, westlake village, CA, 91362, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">University of California, Irvine, Irvine, CA 92697, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">3</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92620</span></p><p id="h.lvxkvbve8i2u" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">The concepts of physics play an important role in many fields of people’s lives, and physics learning is abstract, challenging, and sometimes intimidating [1]. However, how to motivate students to learn physics in a fun way becomes a question. This paper develops a gamic, interactive, educational application to allow students to learn abstract physics in an illustrative way. 
We have implemented a visual physics lab by using a 3D game engine supporting the immersive environment of visualization and providing a playful learning tool for physics experiments at the same time [2].</span></p><p id="h.e1yhh4pv0tsn" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">KEYWORDS</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Physics, Virtualization, Artificial Intelligence, 3D Game Engine.</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><br></p><p id="h.w34t0aabeq65" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; vertical-align: baseline;"><strong>Explainable Network Pruning for Model Acceleration based on Filter Similarity and Importance</strong></span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Jinrong Wu, Su Nguyen, and Damminda Alahakoon, Research Centre for Data Analytics and Cognition, La Trobe University, Melbourne, Australia</span></p><p id="h.sglg1ugmd8gs" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; 
Explainable Network Pruning for Model Acceleration based on Filter Similarity and Importance

Jinrong Wu, Su Nguyen and Damminda Alahakoon, Research Centre for Data Analytics and Cognition, La Trobe University, Melbourne, Australia

ABSTRACT

Filter-level network pruning has effectively reduced computational cost, as well as energy and memory usage, for parameterized deep networks without damaging performance, particularly in computer vision applications. Most filter-level pruning algorithms minimize the impact of pruning on network performance using either importance-based or similarity-based criteria, but no study has compared the effectiveness of the two approaches across different network configurations and datasets. To address this gap, this paper develops two explainable network pruning methods, one importance-based and one similarity-based, to understand their key benefits and limitations. Based on these findings, we also propose an innovative hybrid pruning method and evaluate it on various models and datasets. Comparisons with other state-of-the-art filter pruning methods show the superiority of our hybrid method.

KEYWORDS

Model Compression, Network Pruning, Explainable AI (XAI), Visualization, Convolutional Neural Networks.
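The two signals being contrasted can be computed directly from a convolution layer's weights, as in the PyTorch sketch below: an L1-norm importance score per filter and a cosine-similarity redundancy score against the other filters. The simple "prune low-importance or highly redundant filters" rule at the end is only an illustration, not the paper's hybrid criterion or its explanation tooling.

```python
# PyTorch sketch of the two pruning signals the paper contrasts: per-filter
# importance (L1 norm) and pairwise filter similarity (cosine). The hybrid
# rule below is an illustration, not the paper's actual criterion.
import torch
import torch.nn.functional as F

conv = torch.nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3)
W = conv.weight.detach().flatten(start_dim=1)          # [128 filters, 64*3*3]

importance = W.abs().sum(dim=1)                        # L1 norm per filter
sim = F.cosine_similarity(W.unsqueeze(1), W.unsqueeze(0), dim=-1)
sim.fill_diagonal_(0)
redundancy = sim.max(dim=1).values                     # similarity to closest other filter

prune_ratio = 0.3
k = int(prune_ratio * W.shape[0])
low_importance = set(importance.argsort()[:k].tolist())
most_redundant = set(redundancy.argsort(descending=True)[:k].tolist())
to_prune = sorted(low_importance | most_redundant)
print(f"pruning {len(to_prune)} of {W.shape[0]} filters")
```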
#000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, Jiajun Yan</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;"> and Bihuan Chen</span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">1</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Engineering, The University of Hong Kong, HongKong, China, </span><sup style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 7.199999999999999pt; font-weight: normal;">2</sup><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Department of Data Technology, PingAn Wealth Management Company, Shenzhen, China</span></p><p id="h.i3en6eu889qf" dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 2pt; margin-top: 11pt; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">ABSTRACT</span></p><p dir="ltr" class="CDt4Ke zfr3Q" style="background-color: transparent; border-bottom: none; border-left: none; border-right: none; border-top: none; line-height: 1.2; margin-bottom: 0; margin-top: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; padding-top: 0; text-align: justify; white-space: normal;"><span style="color: #000000; font-family: &#39;Times New Roman&#39;, &#39;Arial&#39;; font-size: 12pt; font-weight: normal; vertical-align: baseline;">Investment research and analysis in the financial area use a large number of real-time market information. Due to financial terms and characters in financial news, sentiment analysis and event classification have great difficulties. In this situation, this paper trains a corpus of the Chinese financial area based on the XLNet, and proposes a Fin-XLNet model to complete sentiment analysis and event classification of financial news. Our experimental results show that this model outperforms both native and related models. Based on the Fin-XLNet model, we build a real-time service system to output the results of sentiment analysis and event classification to the real-time data warehouse architecture. 
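Fin-XLNet itself is not public, so the following is only a rough sketch of fine-tuning a stock XLNet sequence classifier for financial sentiment with the HuggingFace transformers library; the model name, label set, and example headline are placeholder assumptions.

# Hypothetical sketch: fine-tune a generic XLNet classifier for financial
# sentiment. The Fin-XLNet corpus, labels and hyper-parameters are assumptions.
import torch
from transformers import XLNetForSequenceClassification, XLNetTokenizer

tokenizer = XLNetTokenizer.from_pretrained("xlnet-base-cased")
model = XLNetForSequenceClassification.from_pretrained(
    "xlnet-base-cased", num_labels=3)          # e.g. negative / neutral / positive
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)

texts = ["Company X reports record quarterly profit."]   # placeholder headline
labels = torch.tensor([2])                                # assumed label id

batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")
loss = model(**batch, labels=labels).loss                 # cross-entropy loss
loss.backward()
optimizer.step()                                          # one training step

A model fine-tuned in this way would sit behind the real-time service described above, writing its predictions into the data warehouse.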
KEYWORDS

Fin-XLNet, XLNet, sentiment analysis, event classification.


Improving Explanations of Image Classification with Ensembles of Learners

Aadil Ahamed1, Kamran Alipour1, Sateesh Kumar1, Severine Soltani2 and Michael Pazzani3, 1Department of Computer Science and Engineering, University of California, San Diego, La Jolla, CA, USA, 2Department of Bioengineering, University of California, San Diego, La Jolla, CA, USA, 3Information Sciences Institute, Marina Del Rey, CA, USA

ABSTRACT

Alzheimer's disease (AD), a kind of dementia, is marked by progressive cognitive and behavioral problems that appear in middle or late life. Alzheimer's disease must be detected early in order to create more effective therapies. Dr. Alois Alzheimer was the first doctor to notice an unusual state of change in the brains of his deceased patients with mental illness, which marked the start of Alzheimer's research. Machine learning (ML) techniques employ a variety of probabilistic and optimization strategies to allow computers to learn from vast and complex datasets. Because of the limited amount of labeled data and the prevalence of outliers in current datasets, accurate dementia prediction is extremely difficult. In this research, we propose a sustainable framework for dementia prediction based on ML techniques such as Support Vector Machine, Decision Tree, AdaBoost, Random Forest, and XGmodel. All the experiments in this study were conducted under the same experimental conditions using the longitudinal MRI dataset.
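As a rough illustration of the classifier comparison described above, the following scikit-learn sketch runs the named models under identical cross-validation conditions; the synthetic features stand in for the longitudinal MRI data, and the gradient-boosting model is omitted to keep dependencies minimal.

# Illustrative comparison of the classifiers named above on synthetic data
# (placeholder features, not the longitudinal MRI dataset).
from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier

X, y = make_classification(n_samples=300, n_features=10, random_state=0)
models = {
    "SVM": SVC(),
    "Decision Tree": DecisionTreeClassifier(random_state=0),
    "AdaBoost": AdaBoostClassifier(random_state=0),
    "Random Forest": RandomForestClassifier(random_state=0),
}
for name, clf in models.items():
    acc = cross_val_score(clf, X, y, cv=5).mean()   # same 5-fold split per model
    print(f"{name}: {acc:.3f}")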
KEYWORDS

Machine learning, Alzheimer's disease, Feature selection, Biomechanical parameters.


A Context-Aware and Adaptive System to Automate the Control of the AC Windshield using AI and Internet of Things

Joshua Tian1 and Yu Sun2, 1Arnold O. Beckman High School, 3588 Bryan Ave, Irvine, CA 92602, 2California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92620

ABSTRACT

In recent years, we have seen a huge increase in air conditioning usage [4]. However, much of the energy put into air conditioning is wasted, which makes the world less environmentally friendly and is inconvenient for many [5][6]. This paper develops a smart vent and a mobile app that regulate temperatures in different rooms of a home, creating an efficient solution to save energy. This conservation of energy both preserves the environment and decreases the financial burden on families in need. Controlled studies of the system provide evidence of the system's automated ability to be energy efficient.
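The paper's control logic is not included here; a minimal sketch of the idea, assuming a hypothetical read_temperature() sensor helper and a hypothetical set_vent() actuator on the Raspberry Pi, might look like:

# Minimal illustrative control loop (hypothetical helpers, not the paper's code):
# open a room's vent only while the room is warmer than its target temperature.
import time

def read_temperature(room):      # placeholder for a real sensor reading
    return 23.5

def set_vent(room, open_vent):   # placeholder for a real actuator (e.g. a servo)
    print(f"{room}: vent {'open' if open_vent else 'closed'}")

targets = {"bedroom": 22.0, "living_room": 24.0}   # set from the mobile app

while True:
    for room, target in targets.items():
        set_vent(room, open_vent=read_temperature(room) > target)
    time.sleep(60)               # re-check every minute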
KEYWORDS

AI, AC, Raspberry PI.


The Non-deterministic Polynomial-time-completeness of the quay crane scheduling problem in a maritime port

Ali Skaf, Samir Dawaliby and Arezki Aberkane, Caplogy SAS, Paris, France

ABSTRACT

This paper discusses the computational complexity of the quay crane scheduling problem (QCSP) in a maritime port. For an NP-complete problem, no polynomial-time algorithm for the exact solution is known, so heuristic approaches are typically used to obtain near-optimal solutions within reasonable time complexity. To address this, we first formulate the QCSP as a mixed integer linear program to solve it to optimality, and we then theoretically prove that the examined problem is NP-complete.
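The paper's MILP formulation is not reproduced here; purely for orientation, a toy crane-to-bay assignment with a makespan objective can be written with PuLP as follows (the bay processing times are assumed):

# Toy MILP in the spirit of a quay crane assignment (not the paper's model):
# assign container bays to cranes so that the busiest crane finishes earliest.
import pulp

times = {"b1": 4, "b2": 2, "b3": 3}          # assumed bay processing times
bays, cranes = list(times), ["c1", "c2"]

prob = pulp.LpProblem("toy_qcsp", pulp.LpMinimize)
assign = pulp.LpVariable.dicts("assign", (bays, cranes), cat="Binary")
makespan = pulp.LpVariable("makespan", lowBound=0)

prob += makespan                                            # objective
for b in bays:                                              # each bay served once
    prob += pulp.lpSum(assign[b][c] for c in cranes) == 1
for c in cranes:                                            # crane workload bounds makespan
    prob += pulp.lpSum(times[b] * assign[b][c] for b in bays) <= makespan

prob.solve(pulp.PULP_CBC_CMD(msg=False))
print(pulp.value(makespan))                                 # optimal makespan: 5.0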
KEYWORDS

Quay crane, Container, Scheduling, Optimization, MILP, NP-complete.


A Unity Microscope simulation to help students get more access to lab equipment online during COVID-19 pandemic

Kaiwen Chen1 and Yu Sun2, 1Margarita Catholic High School, 22062 Antonio Pkwy, Rancho Santa Margarita, CA 92688, 2California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92620

ABSTRACT

Something that still remains an issue to this day is how students and other individuals can become educated in matters that are generally taught in person and are difficult to translate to an online environment [5]. In particular, teaching how to operate lab equipment without hands-on experience is incredibly difficult. With the COVID-19 pandemic, the need for sufficient online learning materials and tools has become much greater in recent years [6]. To resolve this issue, a simulation was made in Unity that aims to educate its users on how to work with a microscope [7]. Sliders are provided in the simulation to control the X-axis, Y-axis, Z-axis, and focus. The simulation was tested for its effectiveness by gathering fifteen participants to download and test the simulation, then asking each participant to fill out a survey. In the survey, the participants graded the educational value and convenience of using the application on a scale from one to ten, and they were encouraged to leave any other feedback in a free-response section of the survey [8]. Results indicated that the general public would find this simulation practical in daily life, as participants generally rated the simulation as both educational and convenient to use.

KEYWORDS

Simulation, Microscope, Unity.


Robust Discriminative Non-Negative Matrix Factorization with Maximum Correntropy Criterion

Hang Cheng, National Innovation Institute of Defense Technology, Academy of Military Sciences, Beijing, China

ABSTRACT

Non-negative matrix factorization (NMF) is an effective dimension reduction tool and has been widely used in pattern recognition and computer vision. However, conventional NMF models are neither robust enough, as their objective functions are sensitive to outliers, nor discriminative enough, as they completely ignore the discriminative information in the data. In this paper, we propose a robust discriminative NMF model (RDNMF) for learning an effective discriminative subspace from noisy datasets. In particular, RDNMF approximates observations by their reconstructions in the subspace via the maximum correntropy criterion, preventing outliers from influencing the subspace. To incorporate discriminative information, RDNMF builds adjacency graphs using maximum correntropy criterion based robust representation, and regularizes the model with a margin maximization criterion. We develop a multiplicative update rule to optimize RDNMF and theoretically prove its convergence. Experimental results on popular datasets verify the effectiveness of RDNMF compared with conventional NMF models, discriminative NMF models, and robust NMF models.
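RDNMF's correntropy weighting and graph regularization are not reproduced here; for orientation, the classic multiplicative updates for plain NMF, which such models extend, look roughly like this NumPy sketch:

# Plain NMF with multiplicative updates (Lee & Seung style), as a baseline
# sketch only; RDNMF's correntropy weighting and regularizers are omitted.
import numpy as np

rng = np.random.default_rng(0)
V = rng.random((100, 50))            # non-negative data matrix (placeholder)
k = 10                               # subspace dimension
W = rng.random((100, k))
H = rng.random((k, 50))

eps = 1e-9
for _ in range(200):
    H *= (W.T @ V) / (W.T @ W @ H + eps)     # update coefficients
    W *= (V @ H.T) / (W @ H @ H.T + eps)     # update basis
print(np.linalg.norm(V - W @ H))             # reconstruction error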
KEYWORDS

Dimension reduction, non-negative matrix factorization, maximum correntropy criterion, supervised learning, margin maximization.


Trustworthy Artificial Intelligence for Blockchain-based Cryptocurrency

Tiffany Zhan, USAOT, Las Vegas, Nevada, USA

ABSTRACT

Blockchain-based cryptocurrency has attracted immense attention from individuals and businesses. With distributed ledger technology (DLT), a growing list of record blocks is securely linked together using cryptography: each block contains a cryptographic hash of the previous block, a timestamp, and transaction data. The timestamp proves that the transaction data existed when the block was created. Since each block contains information about the block before it, the blocks effectively form a chain (compare the linked list data structure), with each additional block linking to the ones before it. Consequently, blockchain transactions are irreversible in that, once they are recorded, the data in any given block cannot be altered retroactively without altering all subsequent blocks. Blockchain-based technologies have been emerging at a rapid pace. In this paper, trustworthy Artificial Intelligence (AI) is explored for blockchain-based cryptocurrency, where prohibitive price leaps create a challenge for financial analysis and prediction.
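As a small, self-contained illustration of the hash-chaining property described above (a toy example, not a real cryptocurrency):

# Minimal hash-chain sketch: each block stores a timestamp, data, and the
# hash of the previous block, so earlier blocks cannot be altered unnoticed.
import hashlib, json, time

def make_block(data, prev_hash):
    block = {"timestamp": time.time(), "data": data, "prev_hash": prev_hash}
    block["hash"] = hashlib.sha256(
        json.dumps(block, sort_keys=True).encode()).hexdigest()
    return block

genesis = make_block("genesis", "0" * 64)
block1 = make_block({"from": "alice", "to": "bob", "amount": 1}, genesis["hash"])
print(block1["prev_hash"] == genesis["hash"])   # True: the blocks form a chain

Recomputing any earlier block's hash would invalidate every later prev_hash, which is the irreversibility property noted above.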
KEYWORDS

Trustworthy Artificial Intelligence, Blockchain, Cryptocurrency, Financial Prediction.


An Intelligent Video Editing Automation Framework using AI and Computer Vision

Mei Yi Yang1 and Yu Sun2, 1Southpointe Academy, 1900 56 St, Delta, BC V4L 2B1, 2California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92620

ABSTRACT

The issue to be resolved was that editing videos can be a difficult and time-consuming process, especially cropping them [4]. The solution implemented was a mobile application capable of cropping videos using a body tracking solution called MediaPipe Pose [5]. Using a landmark model that labels the body parts of a person, the application recognizes where the person is located in the video frame by saving the minimum and maximum x- and y-coordinates. In the image array, the rows and columns not included within those coordinates are deleted, which leaves only the area with the person inside. To prove the effectiveness of the application in daily life, a survey was performed on fifteen participants. Each participant was shown the same video demonstration of the application being used, and then answered questions about how useful the application would likely be in daily life and how convenient it would be to use. Results indicate that the general public would be willing to use the application as a long-term solution for video cropping [7].
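A rough sketch of the cropping step described above, using MediaPipe Pose landmarks to form a bounding box and NumPy-style slicing to keep only that region; the file names are placeholders:

# Crop a frame to the detected person: take the min/max landmark coordinates
# from MediaPipe Pose and keep only that region of the image array.
import cv2
import mediapipe as mp

frame = cv2.imread("frame.jpg")                 # placeholder input frame
h, w = frame.shape[:2]

with mp.solutions.pose.Pose(static_image_mode=True) as pose:
    results = pose.process(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))

if results.pose_landmarks:
    xs = [lm.x for lm in results.pose_landmarks.landmark]
    ys = [lm.y for lm in results.pose_landmarks.landmark]
    x0, x1 = int(min(xs) * w), int(max(xs) * w)   # landmarks are normalized
    y0, y1 = int(min(ys) * h), int(max(ys) * h)
    cropped = frame[max(y0, 0):min(y1, h), max(x0, 0):min(x1, w)]
    cv2.imwrite("cropped.jpg", cropped)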
KEYWORDS

Video Editor, Dancing, Video Cropping, Flutter.


A Single Level Detection Model for Traffic Sign Detection using Channel Shuffle Residual Structure

Yuanzhi Luo and Jie Hao, College of Computer Science and Technology, Nanjing University of Aeronautics and Astronautics, China

ABSTRACT

Traffic sign recognition (TSR) is a challenging task for unmanned systems, especially because traffic signs are small in the road view image. In order to ensure the real-time performance and robustness of traffic sign detection in automated driving systems, we present a single level detection model for TSR which consists of three core components. First, we use a channel shuffle residual network structure to ensure the real-time performance of the system, which mainly uses low-level features to enhance the representation of small target feature information. Second, we use a dilated convolution residual block to enlarge the receptive field and detect multi-scale targets. Third, we propose a dynamic and adaptive matching method for the anchor frame selection problem of small traffic signs. Experiments on the Tsinghua-Tencent 100K Dataset and the Chinese Traffic Sign Dataset benchmark show better accuracy and robustness compared with existing detection networks. With an image size of 800 × 800, the proposed model achieves 92.9 while running at 120 FPS on a 2080Ti.
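The paper's network design is not reproduced here; as a reference point, the channel shuffle operation at the heart of such residual structures (popularized by ShuffleNet) can be written in PyTorch roughly as:

# Channel shuffle as used in ShuffleNet-style residual blocks (illustrative only;
# the paper's exact block design is not reproduced).
import torch

def channel_shuffle(x: torch.Tensor, groups: int) -> torch.Tensor:
    n, c, h, w = x.shape
    x = x.view(n, groups, c // groups, h, w)   # split channels into groups
    x = x.transpose(1, 2).contiguous()         # interleave the groups
    return x.view(n, c, h, w)

x = torch.randn(1, 8, 4, 4)
print(channel_shuffle(x, groups=2).shape)      # torch.Size([1, 8, 4, 4])

Shuffling after grouped convolutions lets information mix across channel groups at almost no computational cost, which is why it suits real-time detectors.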
KEYWORDS

Computer Vision, Traffic Sign Detection, Convolutional Neural Networks, Label Assignment.


Deep Q Networks based Multi Agent Reinforcement learning for 3D MRI Brain Tumor Localization

Ashish Kumar Chauhan, Yuvaram Singh and Kameshwar Rao JV, NEXT.ai CoE, HCL Technologies Limited, Noida, India

ABSTRACT

In the medical domain, the availability of labeled data for developing DNN-based models is a challenge. Brain tumor detection is the process of finding the tumor region in a patient's 3D MRI scan. Most of the time this task is done manually by trained professionals, as it requires precision and domain knowledge. Traditionally, 3D object detection based methods are used for brain tumor detection. Here we utilize an RL and Vision Transformer based method to train a multi-agent RL system capable of detecting a brain tumor in 3D MRI. A Deep Q network agent is trained to detect the tumor present in the brain from a patient's 3D MRI scan. The multi-agent RL system is trained on the BraTS dataset. Our 3D CNN based transformer model was able to achieve an average error of 1.7 mm in distance from the centroid of the tumor.
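The paper's 3D CNN/transformer agents and BraTS pipeline are not reproduced here; the sketch below only illustrates a single toy Deep Q-learning update of the kind such an agent would perform, with assumed state and action dimensions.

# Toy Deep Q-learning update (illustrative; no target network or replay buffer).
# The action set is assumed to be moves of a 3D search window toward the tumor.
import torch
import torch.nn as nn

state_dim, n_actions = 16, 6
q_net = nn.Sequential(nn.Linear(state_dim, 64), nn.ReLU(), nn.Linear(64, n_actions))
optimizer = torch.optim.Adam(q_net.parameters(), lr=1e-3)
gamma = 0.99

# One transition (s, a, r, s'); placeholders stand in for real observations.
s, s_next = torch.randn(1, state_dim), torch.randn(1, state_dim)
a, r = torch.tensor([2]), torch.tensor([1.0])   # reward: e.g. moved closer to tumor

with torch.no_grad():
    target = r + gamma * q_net(s_next).max(dim=1).values        # TD target
q_sa = q_net(s).gather(1, a.view(-1, 1)).squeeze(1)             # Q(s, a)
loss = nn.functional.mse_loss(q_sa, target)
optimizer.zero_grad(); loss.backward(); optimizer.step()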
KEYWORDS

Multi agent RL, Vision transformer, 3D MRI, Medical diagnosis.


Hyper-Parameter Tuning in Deep Neural Network Learning

Tiffany Zhan, USAOT, Las Vegas, Nevada, USA

ABSTRACT

Deep learning has been increasingly used in applications such as image and video recognition, recommender systems, image classification, image segmentation, medical image analysis, natural language processing, brain–computer interfaces, and financial time series. In deep learning, a convolutional neural network (CNN) is a regularized version of the multilayer perceptron. Multilayer perceptrons usually mean fully connected networks, that is, each neuron in one layer is connected to all neurons in the next layer. The full connectivity of these networks makes them prone to overfitting the data. Typical ways of regularization, or preventing overfitting, include penalizing parameters during training or trimming connectivity. CNNs use relatively little pre-processing compared to other image classification algorithms. Given the rise in popularity and use of deep neural network learning, tuning hyper-parameters is an increasingly prominent task in constructing efficient deep neural networks. In this paper, the tuning of deep neural network (DNN) hyper-parameters is explored using an evolutionary approach popularized for estimating solutions to problems where the problem space is too large to obtain an exact solution.
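A minimal sketch of evolutionary hyper-parameter search of the kind referred to above, with a placeholder fitness function standing in for training and evaluating a network:

# Hypothetical evolutionary search over two hyper-parameters: mutate a small
# population of configurations and keep the fittest each generation.
import random

def fitness(cfg):
    # Placeholder objective; in practice this would train and validate a DNN.
    return -((cfg["lr"] - 0.01) ** 2) - 0.001 * cfg["layers"]

def mutate(cfg):
    return {"lr": abs(cfg["lr"] * random.uniform(0.5, 1.5)),
            "layers": max(1, cfg["layers"] + random.choice([-1, 0, 1]))}

population = [{"lr": random.uniform(1e-4, 1e-1), "layers": random.randint(1, 8)}
              for _ in range(10)]
for _ in range(20):                       # generations
    population.sort(key=fitness, reverse=True)
    parents = population[:5]              # selection
    population = parents + [mutate(random.choice(parents)) for _ in range(5)]

print(max(population, key=fitness))       # best configuration found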
KEYWORDS

Deep Learning, Convolutional Neural Network, Deep Neural Network Learning, Hyper-Parameters.


Word Predictability is Based on Context - and/or Frequency

Rodolfo Delmonte, Ca' Foscari University, Venice, Italy, and Nicolò Busetto, Accenture TTS Computational Linguistics

ABSTRACT

In this paper we present an experiment carried out with BERT on a small number of Italian sentences taken from two domains: newspapers and poetry. They represent two levels of increasing difficulty for the masked-word prediction that we intended to test. The experiment is organized around the hypothesis of increasing difficulty in predictability at the three levels of linguistic complexity that we intend to monitor: the lexical, syntactic, and semantic levels. To test this hypothesis we alternate canonical and non-canonical versions of the same sentence before processing them with the same DL model. The results show that DL models are highly sensitive to the presence of non-canonical structures and to local non-literal compositional meaning effects. However, DL models are also very sensitive to word frequency, preferentially predicting function over content words and collocates over infrequent word phrases. To measure differences in performance we created a linguistically based “predictability parameter” which is highly correlated with a cosine-based classification but produces better distinctions between classes.
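The paper's Italian test sentences and predictability measure are not reproduced here; the masked-word task itself can be probed with an off-the-shelf multilingual BERT along these lines (model name and sentence are placeholder assumptions):

# Probe the masked-word task with a stock model (illustrative only; the paper's
# corpus, canonical/non-canonical pairs and scoring are not reproduced).
from transformers import pipeline

fill = pipeline("fill-mask", model="bert-base-multilingual-cased")
sentence = "Il governo ha approvato la nuova [MASK]."   # placeholder sentence
for candidate in fill(sentence, top_k=5):
    print(candidate["token_str"], round(candidate["score"], 3))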
An Intelligent and Social-Oriented Sentiment Analytical Model for Stock Market Prediction using Machine Learning and Big Data Analysis
Muqing Bai1 and Yu Sun2, 1Brookfield Academy, 3215 N Brookfield Rd, Brookfield, WI 53045, 2California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92620

ABSTRACT

In an era of machine learning, many fields outside of computer science have adopted machine learning as a tool [5]. In the financial world, a variety of machine learning models are used to predict the future price of a stock in order to optimize profit. This paper focuses on the correlation between the price of a stock and the public sentiment it attracts on social media [6]. Public sentiment refers to the polarity of people's comments, which is computed by a sentiment analysis bot. We trained different machine learning algorithms to find the best model for predicting stock prices from this sentiment. To make the model accessible to the public, a web-based server and a mobile application were created. We used Thunkable, a powerful no-code platform, to produce our mobile application [7]. It allows anyone to check the predictions of stocks, helping people with their investment decisions.

KEYWORDS

Stock, Machine learning, Thunkable.
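A minimal sketch of the kind of sentiment-to-price model the abstract describes, assuming a single daily polarity feature, synthetic data, and a plain linear regressor; the paper's actual features and algorithms may differ.

```python
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split

# Hypothetical daily data: mean comment polarity in [-1, 1] and next-day price change in %.
rng = np.random.default_rng(0)
sentiment = rng.uniform(-1, 1, size=(200, 1))
price_change = 2.0 * sentiment[:, 0] + rng.normal(0, 0.5, size=200)  # synthetic relationship

X_train, X_test, y_train, y_test = train_test_split(
    sentiment, price_change, test_size=0.25, random_state=0)

model = LinearRegression().fit(X_train, y_train)
print("R^2 on held-out days:", round(model.score(X_test, y_test), 3))
print("Predicted change for polarity 0.8:", round(model.predict([[0.8]])[0], 3))
```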
The Problem Solver: A Mobile Platform to Mediate Teenager Family Relationship using Dart and Machine Learning
Ziheng Guan1 and Ang Li2, 1Arcadia High School, 180 Campus Dr, Arcadia, CA 91006, 2California State University, Long Beach, 1250 Bellflower Blvd, Long Beach, CA 90840

ABSTRACT

Family conflicts between parents and their children are nothing new and are experienced by many [1]. These conflicts can be exacerbated by cultural differences between the two parties, especially in cases where the parents and child were raised in different countries, cultures and/or generations [2]. This describes my own experience of conflict with my parents, which is what inspired me to create The Problem Solver app. The app differs from other methods of resolving such conflicts in that it facilitates more direct communication between the two conflicting parties, which should lead to faster and more successful conflict resolution [3]. Naturally, there were challenges in building the app, but I was eventually able to work through them and deliver a working product. I also discuss related works and research supporting the idea that cultural differences between generations raised differently can affect familial relations [4]. Finally, I give a general overview of the app's system and discuss its possible limitations and the further steps I could take in its development.

KEYWORDS

Machine Learning, Communication, Cultural Differences, Flutter.
An Interactive and Scenario-based Simulation Gaming System for Business Education Using Game Engine and Machine Learning
Yingzhi Ma1 and Yu Sun2, 1Crean Lutheran High School, 12500 Sand Canyon Ave, Irvine, CA 92618, 2California State Polytechnic University, Pomona, CA, 91768, Irvine, CA 92620

ABSTRACT

Technology has become increasingly vital to society. The COVID-19 pandemic demonstrated how useful technology is in keeping society running, especially in education [15]. One major trend is the use of simulations as a tool for education, and business is one of the fields that could benefit massively from new educational simulations. For this study, a survey was conducted to gauge participants' prior educational experience, their interest in fields such as business and computer science, and their previous experience with interactive simulations. Fifty-one participants both completed the survey and gave consent to have their data shared in this research paper. These participants were then given an additional survey in which they either tested a business simulation or watched a video of one and reported whether they learned from it. The results indicate that although most people would want to play a game that taught them more about business, only roughly 45% of individuals expressed interest in the topic of business itself. The responses also indicated that a large majority of individuals would prefer more interactive educational simulations for other topics as well. The reception of the business simulation was mostly positive, and participants indicated that it was effective at helping them learn business. Overall, it was concluded that there is not enough access to business simulations to meet the public's interest, and either more should be created or existing ones should be made better known.

KEYWORDS

Machine Learning, Game development, business education.
Research on Low-Carbon Innovation Decision of Manufacturing Resources Considering Quality Difference and Government Subsidy Under Tripartite Trading Platform
Longzhen Zhou1, Zusheng Zhang1, Xu Wang1,*, Yingbo Wu2, Longxiao Li3, 1College of Mechanical Engineering, Chongqing University, Chongqing, 400030, PR China, 2School of Big Data & Software Engineering, Chongqing University, Chongqing, 401331, PR China, 3College of Mechanical Engineering, Chongqing University of Science & Technology, Chongqing, 401331, PR China

ABSTRACT

In the context of low-carbon innovation, reasonable subsidy, innovation, and pricing strategies are important for achieving resource decarbonization and supply-demand matching, and the quality differentiation of resources has a significant impact on strategy formulation. In this paper, we study low-carbon innovation and government subsidy in different innovation scenarios with two providers offering differentiated manufacturing resources on a resource trading platform, integrating two variables: resource quality difference and demand-side low-carbon preference. Using utility theory and a Stackelberg game, a decision model of low-carbon innovation and government subsidy is constructed, and the equilibrium solution is obtained by backward induction. The low-carbon innovation and subsidy strategies under the different innovation scenarios are then compared, and the effects of the relative quality coefficient and the innovation cost coefficients on the strategies are analyzed. The findings show that when the difference in resource quality is small, the level of green innovation is higher when low-carbon innovation is undertaken on the high-quality resource than on the low-quality resource, and the government subsidy rate for innovation investment is also higher. When the difference in resource quality is large, the relative magnitudes of the green innovation level and the government subsidy rate for innovation inputs across scenarios depend on the innovation cost coefficients.

KEYWORDS

Low-carbon innovation, Quality differentiation of resources, Government subsidies, Manufacturing resource trading platform.
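As a stylized illustration of solving such a model by backward induction, the sketch below works through only the provider (follower) stage for an assumed linear demand with a low-carbon preference term and a quadratic, partly subsidised innovation cost. The functional forms, symbols, and the single-provider simplification are illustrative assumptions, not the paper's model; the government-leader stage is omitted for brevity.

```python
import sympy as sp

# Provider chooses green innovation level e, then price p, given the government subsidy rate s.
p, e = sp.symbols("p e", positive=True)
a, c, theta, k, s = sp.symbols("a c theta k s", positive=True)  # market size, unit cost, low-carbon preference, cost coeff., subsidy rate

demand = a - p + theta * e                            # assumed linear demand, rising with innovation
profit = (p - c) * demand - (1 - s) * k * e**2 / 2    # innovation cost partly covered by the subsidy

# Backward induction: optimal price for a given e, then optimal e anticipating that price.
p_star = sp.solve(sp.diff(profit, p), p)[0]
profit_e = profit.subs(p, p_star)
e_star = sp.solve(sp.diff(profit_e, e), e)[0]

print("p*(e) =", sp.simplify(p_star))
print("e*    =", sp.simplify(e_star))  # innovation level as a function of s, k, theta, a, c
```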
The Quarry Industry and The Cameroonian Law
ENOW Godwill BAIYE, Ph.D. in English Private Law, University of Dschang-Cameroon

ABSTRACT

Quarrying, like other mining sectors, contributes significantly to the socio-economic development of many countries: it provides raw materials for local industries and for export, facilitates the provision of infrastructure such as roads and the opening up of remote areas, and provides employment opportunities. To promote the development of this sector in Cameroon, Law No. 2016/017 of 14 December 2016 on the Cameroonian Mining Code, amending and supplementing Law No. 2001/01 of 16 April 2001, was promulgated. In this regard, this article examines the extent to which Cameroonian law regulates the operations and activities of quarry operators in Cameroon. The study relied on published and unpublished studies of the quarry sector in Cameroon to gather the secondary data on which this work is built. The article reveals that although quarry activities have helped spur economic development in the country, the environmental and health challenges associated with this activity cannot be overlooked. In this light, to promote sustainable quarry operations in Cameroon, the researcher emphasises proper follow-up and monitoring of quarry operations, with stringent sanctions instituted against any operator who fails to comply with the laws regulating this sector of the economy.

KEYWORDS

Quarry, quarry operators, quarrying, quarry permits, quarry licences.
Anomaly Detection based on Alarms Data
Michel Kamel, Anis Hoayek and Mireille Batton-Hubert, Mathematics and Industrial Engineering Department, Ecole des Mines de Saint-Etienne, University Clermont Auvergne, CNRS UMR 6158 LIMOS

ABSTRACT

Alarms data is a very important source of information for network operation center (NOC) teams: it aggregates and displays the alarming events occurring within the network elements. However, on a large network, a long list of alarms is generated almost continuously. Intelligent analytical reporting of these alarms is needed to help the NOC team eliminate noise and focus on primary events, hence the need for an anomaly detection model that learns from historical alarms data. It is also important to indicate the root cause of anomalies so that immediate corrective action can be taken. In this paper, we introduce a new algorithm that derives four features from historical data and aggregates them into a final score, optimized against supervised labels for greater accuracy. These four features reflect the likelihood of occurrence of events, the sequence of events, and the importance of relatively new events not seen in the historical data. Certain assumptions about the data are checked with the relevant statistical tests; after validating them, we measure accuracy on labelled data and find that the proposed algorithm achieves high anomaly detection accuracy.

KEYWORDS

Alarms, Anomaly detection, Events data, Probabilistic scoring distribution.
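A simplified sketch of the general idea of scoring alarms from historical data and supervised labels: two illustrative features (historical likelihood and novelty of unseen events) are combined through a logistic model. The paper's actual four features and aggregation are not reproduced here, and the data below are synthetic.

```python
import numpy as np
from collections import Counter
from sklearn.linear_model import LogisticRegression

# Historical alarms and a labelled window of new alarms (1 = anomalous). Synthetic, illustrative data.
history = ["LINK_DOWN", "LINK_DOWN", "HIGH_TEMP", "CPU_LOAD", "LINK_DOWN", "CPU_LOAD"]
new_alarms = ["LINK_DOWN", "HIGH_TEMP", "FAN_FAILURE", "CPU_LOAD", "POWER_LOSS"]
labels = np.array([0, 0, 1, 0, 1])

freq = Counter(history)
total = len(history)

def features(alarm):
    likelihood = freq.get(alarm, 0) / total      # how common the event was historically
    novelty = 1.0 if alarm not in freq else 0.0  # importance of events never seen before
    return [likelihood, novelty]

X = np.array([features(a) for a in new_alarms])
clf = LogisticRegression().fit(X, labels)        # supervised weighting of the features
scores = clf.predict_proba(X)[:, 1]              # final anomaly score per alarm
for alarm, score in zip(new_alarms, scores):
    print(f"{alarm:12s} score={score:.2f}")
```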
Image Segmentation in Shape Synthesis, Shape Optimization, And Reverse Engineering
Milan Ćurković1, Andrijana Ćurković2, Damir Vučina1 and Domagoj Samardžić1, 1Faculty of Electrical Engineering, Mechanical Engineering and Naval Architecture, University of Split, Croatia, 2Faculty of Science, University of Split, Croatia

ABSTRACT

Image segmentation and segmentation of geometry are among the basic requirements for reverse engineering, shape synthesis, and shape optimization. In shape optimization and shape synthesis, where the original geometry is to be faithfully replaced with a mathematical parametric model (NURBS, hierarchical NURBS, T-Spline, …), segmentation may be performed directly on the 3D geometry and its corresponding parametric values in the 2D parametric domain. In our approach, we focus on segmenting the 2D parametric domain as an image rather than the 3D geometry. The reason lies in our dynamic hierarchical parametric model, which controls the results of the various image-processing operators applied to the parametric domain.

KEYWORDS

Image Segmentation, Shape Optimization, Shape Synthesis, Reverse Engineering.
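A minimal illustration of treating the 2D parametric domain as an image and segmenting it with standard image-processing operators (thresholding plus connected-component labelling). The scalar field over the (u, v) domain is synthetic, and the choice of operators is an assumption rather than the paper's pipeline.

```python
import numpy as np
from scipy import ndimage

# Synthetic scalar field over the 2D parametric (u, v) domain, e.g. a curvature-like quantity.
u, v = np.meshgrid(np.linspace(0, 1, 128), np.linspace(0, 1, 128))
field = (np.exp(-((u - 0.3) ** 2 + (v - 0.7) ** 2) / 0.01)
         + np.exp(-((u - 0.8) ** 2 + (v - 0.2) ** 2) / 0.02))

# Treat the parametric domain as an image: threshold, then label connected regions.
mask = field > 0.5
labels, n_regions = ndimage.label(mask)
print("segments found in the parametric domain:", n_regions)
for region in range(1, n_regions + 1):
    cells = np.argwhere(labels == region)
    print(f"segment {region}: {len(cells)} parameter cells")
```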
Contact
aimlnet@yahoo.com

All Rights Reserved ® AIMLNET 2022
