Pattern Recognition Research Papers - Academia.edu

href="https://www.academia.edu/press">Press</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/documents">Papers</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/hiring"><i class="fa fa-briefcase"></i>&nbsp;We're Hiring!</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://support.academia.edu/hc/en-us"><i class="fa fa-question-circle"></i>&nbsp;Help Center</a></li><li class="js-mobile-nav-collapse-trigger u-borderColorGrayLight u-borderBottom1 dropup" style="display:none"><a href="#">less&nbsp<span class="caret"></span></a></li></ul></li></ul></div></div></div><script>(function(){ var $moreLink = $(".js-mobile-nav-expand-trigger"); var $lessLink = $(".js-mobile-nav-collapse-trigger"); var $section = $('.js-mobile-nav-expand-section'); $moreLink.click(function(ev){ ev.preventDefault(); $moreLink.hide(); $lessLink.show(); $section.collapse('show'); }); $lessLink.click(function(ev){ ev.preventDefault(); $moreLink.show(); $lessLink.hide(); $section.collapse('hide'); }); })() if ($a.is_logged_in() || false) { new Aedu.NavigationController({ el: '.js-main-nav', showHighlightedNotification: false }); } else { $(".js-header-login-url").attr("href", $a.loginUrlWithRedirect()); } Aedu.autocompleteSearch = new AutocompleteSearch({el: '.js-SiteSearch-form'});</script></div></div> <div id='site' class='fixed'> <div id="content" class="clearfix"> <script>document.addEventListener('DOMContentLoaded', function(){ var $dismissible = $(".dismissible_banner"); $dismissible.click(function(ev) { $dismissible.hide(); }); });</script> <div class="DesignSystem" style="margin-top:-40px"><div class="PageHeader"><div class="container"><div class="row"><style type="text/css">.sor-abstract { display: -webkit-box; overflow: hidden; text-overflow: ellipsis; -webkit-line-clamp: 3; -webkit-box-orient: vertical; }</style><div class="col-xs-12 clearfix"><div class="u-floatLeft"><h1 class="PageHeader-title u-m0x u-fs30">Pattern Recognition</h1><div class="u-tcGrayDark">68,836&nbsp;Followers</div><div class="u-tcGrayDark u-mt2x">Recent papers in&nbsp;<b>Pattern Recognition</b></div></div></div></div></div></div><div class="TabbedNavigation"><div class="container"><div class="row"><div class="col-xs-12 clearfix"><ul class="nav u-m0x u-p0x list-inline u-displayFlex"><li class="active"><a href="https://www.academia.edu/Documents/in/Pattern_Recognition">Top Papers</a></li><li><a href="https://www.academia.edu/Documents/in/Pattern_Recognition/MostCited">Most Cited Papers</a></li><li><a href="https://www.academia.edu/Documents/in/Pattern_Recognition/MostDownloaded">Most Downloaded Papers</a></li><li><a href="https://www.academia.edu/Documents/in/Pattern_Recognition/MostRecent">Newest Papers</a></li><li><a class="" href="https://www.academia.edu/People/Pattern_Recognition">People</a></li></ul></div><style type="text/css">ul.nav{flex-direction:row}@media(max-width: 567px){ul.nav{flex-direction:column}.TabbedNavigation li{max-width:100%}.TabbedNavigation li.active{background-color:var(--background-grey, 

Detecting imminent eruptive activity at Mt Etna, Italy, in 2007-2008 through pattern classification of volcanic tremor data
by horst langer

Volcano monitoring aims at the recognition of changes in instrumentally observable parameters before hazardous activity in order to alert governmental authorities. Among these parameters seismic data in general and volcanic tremor in particular play a key role. Recent major explosive eruptions such as Okmok (Aleutians) and Chaitén (Chile) in 2008 and numerous smaller events at Mt Etna (Italy), have shown that the period of premonitory seismic activity can be short (only a few hours), which entails the necessity of effective automatic data processing near on-line. Here we present a synoptic pattern classification analysis based on Self Organizing Maps and Fuzzy Cluster Analysis which is applied to volcanic tremor data recorded during a series of paroxysmal eruptive episodes and a flank eruption at Etna in 2007-2008. In total, eight episodes were analyzed; in six of these significant changes in the dynamic regime of the volcano were detected up to 9 h prior to the onset of eruptive activity, and long before changes in volcanic tremor amplitude and spectral content became evident in classical analysis. In two cases, the state transition was < 1 h before the onset of eruptive activity, which we interpret as evidence for very rapid magma ascent through an open conduit. We further detected twenty failed paroxysms, that is episodes of volcanic unrest that did not culminate in eruptive activity, between March and April 2007. As the application of the software for this synoptic pattern classification is straightforward and requires only moderate computational resources, it was possible to exploit it in an on-line application, which was tested and now is in use at the Istituto Nazionale di Geofisica e Vulcanologia in Catania for the monitoring of Etna. We believe that the pattern classification presented here may become a powerful addition to the repertoire of volcano monitoring tools and early warning techniques worldwide.

Research interests: Geology, Geochemistry, Geophysics, Volcanology, Pattern Recognition, Fuzzy Clustering, Early Warning, Pattern Classification, Self Organized Map, Automatic data processing
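
The abstract above names Self-Organizing Maps (with fuzzy cluster analysis) as the classification engine. As an illustration only, here is a minimal NumPy sketch of the SOM idea applied to synthetic feature vectors standing in for tremor features; the grid size, decay schedule and data are assumptions, and feature extraction, fuzzy clustering and alerting are omitted.

```python
# Minimal SOM sketch (not the authors' pipeline): map feature vectors onto
# a small 2-D grid of prototypes; a change of dynamic regime shows up as a
# shift in which grid nodes "win" over time.
import numpy as np

rng = np.random.default_rng(0)

# Synthetic stand-in for tremor feature vectors: 500 samples, 16 features.
X = np.vstack([
    rng.normal(0.0, 1.0, size=(400, 16)),   # "background" regime
    rng.normal(3.0, 1.0, size=(100, 16)),   # "anomalous" regime
])

grid_h, grid_w, dim = 6, 6, X.shape[1]
weights = rng.normal(size=(grid_h, grid_w, dim))

# Grid coordinates, used by the neighbourhood function.
coords = np.stack(np.meshgrid(np.arange(grid_h), np.arange(grid_w),
                              indexing="ij"), axis=-1).astype(float)

def best_matching_unit(x):
    """Return (row, col) of the prototype closest to sample x."""
    d = np.linalg.norm(weights - x, axis=-1)
    return np.unravel_index(np.argmin(d), d.shape)

n_iter, sigma0, lr0 = 5000, 3.0, 0.5
for t in range(n_iter):
    x = X[rng.integers(len(X))]
    bmu = np.array(best_matching_unit(x), dtype=float)
    sigma = sigma0 * np.exp(-t / n_iter)          # shrinking neighbourhood
    lr = lr0 * np.exp(-t / n_iter)                # decaying learning rate
    dist2 = np.sum((coords - bmu) ** 2, axis=-1)
    h = np.exp(-dist2 / (2.0 * sigma ** 2))       # neighbourhood weights
    weights += lr * h[..., None] * (x - weights)  # pull prototypes toward x

# Map every sample to its winning node.
wins = np.array([best_matching_unit(x) for x in X])
print("nodes occupied by background samples:", np.unique(wins[:400], axis=0).shape[0])
print("nodes occupied by anomalous samples :", np.unique(wins[400:], axis=0).shape[0])
```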

Object-based image analysis: a review of developments and future directions of automated feature detection in landscape archaeology
by Dylan S Davis

Object-based image analysis (OBIA) is a method of assessing remote sensing data that uses morphometric and spectral parameters simultaneously to identify features in remote sensing imagery. Over the past 10-15 years, OBIA methods have been introduced to detect archaeological features. Improvements in accuracy have been attained by using a greater number of morphometric variables and multiple scales of analysis. This article highlights the developments that have occurred in the application of OBIA within archaeology and argues that OBIA is both a useful and necessary tool for archaeological research. Additionally, I discuss future research paths using this method. Some of the suggestions put forth here include: pushing for multifaceted research designs utilizing OBIA and manual interpretation, using OBIA methods for directly studying landscape settlement patterns, and increasing data sharing of methods between researchers.

Research interests: Archaeology, Geophysics, Remote Sensing, Machine Learning, Landscape Archaeology, Pattern Recognition, Cultural Heritage Conservation, Remote sensing and GIS applications in Landscape Research, Archaeological Prospection, Archaeological Remote Sensing, Object Based Image Analysis
href="https://www.academia.edu/30556933/Neural_network_based_detection_of_local_textile_defects">Neural network based detection of local textile defects</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">A new approach for the segmentation of local textile defects using feed-forward neural network is presented. Every fabric defect alters the gray-level arrangement of neighboring pixels, and this change is used to segment the defects. The... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_30556933" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">A new approach for the segmentation of local textile defects using feed-forward neural network is presented. Every fabric defect alters the gray-level arrangement of neighboring pixels, and this change is used to segment the defects. The feature vector for every pixel is extracted from the gray-level arrangement of its neighboring pixels. Principal component analysis using singular value decomposition is used to reduce the dimension of feature vectors. Experimental results using this approach illustrate a high degree of robustness for the detection of a variety of fabric defects. The acceptance of a visual inspection system depends on economical aspects as well. Therefore, a new low-cost solution for the fast web inspection using linear neural network is also presented. The experimental results obtained from the real fabric defects, for the two approaches proposed in this paper, have conÿrmed their usefulness.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/30556933" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="c7fa68b611b010c9a91a2ab21e802a75" rel="nofollow" data-download="{&quot;attachment_id&quot;:50998949,&quot;asset_id&quot;:30556933,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/50998949/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="8738723" href="https://du-in.academia.edu/AJAY_KUMAR">AJAY KUMAR</a><script data-card-contents-for-user="8738723" type="text/json">{"id":8738723,"first_name":"AJAY","last_name":"KUMAR","domain_name":"du-in","page_name":"AJAY_KUMAR","display_name":"AJAY 
KUMAR","profile_url":"https://du-in.academia.edu/AJAY_KUMAR?f_ri=5109","photo":"https://0.academia-photos.com/8738723/2892703/3379166/s65_ajay.yadav.jpg"}</script></span></span></li><li class="js-paper-rank-work_30556933 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="30556933"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 30556933, container: ".js-paper-rank-work_30556933", }); });</script></li><li class="js-percentile-work_30556933 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 30556933; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_30556933"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_30556933 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="30556933"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 30556933; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=30556933]").text(description); $(".js-view-count-work_30556933").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_30556933").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="30556933"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">10</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="5069" rel="nofollow" href="https://www.academia.edu/Documents/in/Principal_Component_Analysis">Principal Component Analysis</a>,&nbsp;<script data-card-contents-for-ri="5069" type="text/json">{"id":5069,"name":"Principal Component Analysis","url":"https://www.academia.edu/Documents/in/Principal_Component_Analysis?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="11598" rel="nofollow" href="https://www.academia.edu/Documents/in/Neural_Networks">Neural Networks</a>,&nbsp;<script data-card-contents-for-ri="11598" type="text/json">{"id":11598,"name":"Neural Networks","url":"https://www.academia.edu/Documents/in/Neural_Networks?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="14417" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Vision">Machine 
Vision</a><script data-card-contents-for-ri="14417" type="text/json">{"id":14417,"name":"Machine Vision","url":"https://www.academia.edu/Documents/in/Machine_Vision?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=30556933]'), work: {"id":30556933,"title":"Neural network based detection of local textile defects","created_at":"2016-12-21T05:34:44.514-08:00","url":"https://www.academia.edu/30556933/Neural_network_based_detection_of_local_textile_defects?f_ri=5109","dom_id":"work_30556933","summary":"A new approach for the segmentation of local textile defects using feed-forward neural network is presented. Every fabric defect alters the gray-level arrangement of neighboring pixels, and this change is used to segment the defects. The feature vector for every pixel is extracted from the gray-level arrangement of its neighboring pixels. Principal component analysis using singular value decomposition is used to reduce the dimension of feature vectors. Experimental results using this approach illustrate a high degree of robustness for the detection of a variety of fabric defects. The acceptance of a visual inspection system depends on economical aspects as well. Therefore, a new low-cost solution for the fast web inspection using linear neural network is also presented. The experimental results obtained from the real fabric defects, for the two approaches proposed in this paper, have conÿrmed their usefulness.","downloadable_attachments":[{"id":50998949,"asset_id":30556933,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":8738723,"first_name":"AJAY","last_name":"KUMAR","domain_name":"du-in","page_name":"AJAY_KUMAR","display_name":"AJAY KUMAR","profile_url":"https://du-in.academia.edu/AJAY_KUMAR?f_ri=5109","photo":"https://0.academia-photos.com/8738723/2892703/3379166/s65_ajay.yadav.jpg"}],"research_interests":[{"id":5069,"name":"Principal Component Analysis","url":"https://www.academia.edu/Documents/in/Principal_Component_Analysis?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":11598,"name":"Neural Networks","url":"https://www.academia.edu/Documents/in/Neural_Networks?f_ri=5109","nofollow":true},{"id":14417,"name":"Machine Vision","url":"https://www.academia.edu/Documents/in/Machine_Vision?f_ri=5109","nofollow":true},{"id":26066,"name":"Neural Network","url":"https://www.academia.edu/Documents/in/Neural_Network?f_ri=5109"},{"id":59918,"name":"Quality Assurance","url":"https://www.academia.edu/Documents/in/Quality_Assurance?f_ri=5109"},{"id":85880,"name":"Singular value decomposition","url":"https://www.academia.edu/Documents/in/Singular_value_decomposition?f_ri=5109"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering?f_ri=5109"},{"id":2003344,"name":"Feed Forward Neural Network","url":"https://www.academia.edu/Documents/in/Feed_Forward_Neural_Network?f_ri=5109"},{"id":2364406,"name":"Visual Inspection","url":"https://www.academia.edu/Documents/in/Visual_Inspection?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_5453069" data-work_id="5453069" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div 
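
The abstract describes a concrete pipeline: a feature vector per pixel from its grey-level neighbourhood, PCA computed via singular value decomposition, and a feed-forward network. Below is a minimal sketch of that pipeline on a synthetic fabric image; the window size, number of components and network size are illustrative assumptions, not the paper's settings.

```python
# Sketch of neighbourhood features -> PCA (via SVD) -> feed-forward net.
import numpy as np
from sklearn.neural_network import MLPClassifier

rng = np.random.default_rng(1)

# Synthetic "fabric": periodic texture with a small defective patch.
h = w = 64
yy, xx = np.mgrid[0:h, 0:w]
img = 0.5 + 0.3 * np.sin(2 * np.pi * xx / 8) * np.sin(2 * np.pi * yy / 8)
img += 0.02 * rng.normal(size=img.shape)
defect = np.zeros((h, w), dtype=bool)
defect[28:36, 30:44] = True
img[defect] = 0.9                                    # local defect

# 1. Per-pixel feature vectors: flattened 7x7 grey-level neighbourhoods.
k = 3
feats, labels = [], []
for i in range(k, h - k):
    for j in range(k, w - k):
        feats.append(img[i - k:i + k + 1, j - k:j + k + 1].ravel())
        labels.append(int(defect[i, j]))
X = np.asarray(feats)
y = np.asarray(labels)

# 2. PCA via SVD: project centred feature vectors onto the leading
#    right singular vectors to reduce dimensionality.
Xc = X - X.mean(axis=0)
U, S, Vt = np.linalg.svd(Xc, full_matrices=False)
X_red = Xc @ Vt[:8].T                                # keep 8 components

# 3. Feed-forward neural network on the reduced features.
clf = MLPClassifier(hidden_layer_sizes=(16,), max_iter=500, random_state=0)
clf.fit(X_red, y)
print("training accuracy:", clf.score(X_red, y))
```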
class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/5453069/Effect_of_focus_position_on_informational_properties_of_acoustic_emission_generated_by_laser_material_interactions">Effect of focus position on informational properties of acoustic emission generated by laser–material interactions</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">To achieve desired accuracy, precision and surface roughness during laser-material removal process, monitoring and control of the process parameters related to laser, optics, workpiece material and its motion are required. Focus position,... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_5453069" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">To achieve desired accuracy, precision and surface roughness during laser-material removal process, monitoring and control of the process parameters related to laser, optics, workpiece material and its motion are required. Focus position, defined as a gap between the focusing lens and the surface of the sample workpiece, is one of the most critical process parameters, which determines the projection of the intensity of the laser beam on the surface to be ablated and therefore directly affects volume and geometry of the material removed and there by machining quality. In this paper, acoustic emission (AE) generated by laser-material interactions was statistically analyzed with respect to the variations in the focus position. The study involved on-line measurements of the AE signal from the laser-material interaction zone as a function of the focus position and the width of the machined trenches. Several basic statistical parameters, e.g. average amplitude, variance and power spectrum density were analyzed to select distinct informational parameters. Pattern recognition analysis of three informational parameters based on variances within frequency diapasons of 20-180, 180-300, and 300-500 kHz was used for reliable classification of the focus position and width of the machined trenches. The results provide important information for future development of on-line monitoring and control systems for laser-material removal process. 
Crown</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/5453069" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="7a9d7fa523df4addc0a73513efe4cf1d" rel="nofollow" data-download="{&quot;attachment_id&quot;:49287080,&quot;asset_id&quot;:5453069,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/49287080/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="7646368" href="https://westernu.academia.edu/EvgueniBordatchev">Evgueni Bordatchev</a><script data-card-contents-for-user="7646368" type="text/json">{"id":7646368,"first_name":"Evgueni","last_name":"Bordatchev","domain_name":"westernu","page_name":"EvgueniBordatchev","display_name":"Evgueni Bordatchev","profile_url":"https://westernu.academia.edu/EvgueniBordatchev?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_5453069 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="5453069"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 5453069, container: ".js-paper-rank-work_5453069", }); });</script></li><li class="js-percentile-work_5453069 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 5453069; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_5453069"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_5453069 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="5453069"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 5453069; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=5453069]").text(description); $(".js-view-count-work_5453069").attr('title', 
description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_5453069").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="5453069"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">11</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5187" rel="nofollow" href="https://www.academia.edu/Documents/in/Statistical_Analysis">Statistical Analysis</a>,&nbsp;<script data-card-contents-for-ri="5187" type="text/json">{"id":5187,"name":"Statistical Analysis","url":"https://www.academia.edu/Documents/in/Statistical_Analysis?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="28235" rel="nofollow" href="https://www.academia.edu/Documents/in/Multidisciplinary">Multidisciplinary</a>,&nbsp;<script data-card-contents-for-ri="28235" type="text/json">{"id":28235,"name":"Multidisciplinary","url":"https://www.academia.edu/Documents/in/Multidisciplinary?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="29640" rel="nofollow" href="https://www.academia.edu/Documents/in/Acoustic_Emission">Acoustic Emission</a><script data-card-contents-for-ri="29640" type="text/json">{"id":29640,"name":"Acoustic Emission","url":"https://www.academia.edu/Documents/in/Acoustic_Emission?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=5453069]'), work: {"id":5453069,"title":"Effect of focus position on informational properties of acoustic emission generated by laser–material interactions","created_at":"2013-12-16T21:26:42.953-08:00","url":"https://www.academia.edu/5453069/Effect_of_focus_position_on_informational_properties_of_acoustic_emission_generated_by_laser_material_interactions?f_ri=5109","dom_id":"work_5453069","summary":"To achieve desired accuracy, precision and surface roughness during laser-material removal process, monitoring and control of the process parameters related to laser, optics, workpiece material and its motion are required. Focus position, defined as a gap between the focusing lens and the surface of the sample workpiece, is one of the most critical process parameters, which determines the projection of the intensity of the laser beam on the surface to be ablated and therefore directly affects volume and geometry of the material removed and there by machining quality. In this paper, acoustic emission (AE) generated by laser-material interactions was statistically analyzed with respect to the variations in the focus position. The study involved on-line measurements of the AE signal from the laser-material interaction zone as a function of the focus position and the width of the machined trenches. Several basic statistical parameters, e.g. average amplitude, variance and power spectrum density were analyzed to select distinct informational parameters. 
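
The classification features in this abstract are variances of the AE signal within three frequency bands. As a rough illustration (not the authors' implementation), the sketch below estimates those band-limited variances by integrating a Welch power-spectral-density estimate over each band; the 2 MHz sampling rate, FFT segment length and synthetic burst are assumptions made for the example.

import numpy as np
from scipy.signal import welch

FS = 2_000_000  # assumed sampling rate (2 MHz), comfortably above the 500 kHz band edge
BANDS = [(20e3, 180e3), (180e3, 300e3), (300e3, 500e3)]  # frequency bands named in the abstract

def band_variance_features(signal, fs=FS, bands=BANDS):
    """Variance of the signal contributed by each frequency band.

    Integrating the power spectral density over a band approximates the variance
    of the band-limited component of the zero-mean signal (Parseval's theorem).
    """
    freqs, psd = welch(signal - np.mean(signal), fs=fs, nperseg=4096)
    df = freqs[1] - freqs[0]
    return np.array([psd[(freqs >= lo) & (freqs < hi)].sum() * df for lo, hi in bands])

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    t = np.arange(0, 0.01, 1 / FS)                            # 10 ms record
    burst = rng.standard_normal(t.size) * np.exp(-t / 2e-3)   # synthetic AE-like burst
    print(band_variance_features(burst))

In the paper these three features feed a pattern-recognition classifier of focus position and trench width; any standard classifier could take their place in a sketch like this.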

Call For Papers - 7th International Conference on Artificial Intelligence and Applications (AI 2021)
by International Journal of Artificial Intelligence (IJAIA)

The 7th International Conference on Artificial Intelligence and Applications (AI 2021) will provide an international forum for sharing knowledge and results in the theory, methodology and applications of Artificial Intelligence. The conference seeks significant contributions to all major fields of Artificial Intelligence and Soft Computing, in both theoretical and practical aspects, and aims to give researchers and practitioners from academia and industry a platform to meet and share cutting-edge developments in the field. Authors are solicited to contribute articles that illustrate research results, projects, surveying works and industrial experiences describing significant advances in these and related areas.

Topics: Bioinformatics, Computer Science, Programming Languages, Information Retrieval
Networks","url":"https://www.academia.edu/Documents/in/Neural_Networks?f_ri=5109"},{"id":12022,"name":"Numerical Analysis","url":"https://www.academia.edu/Documents/in/Numerical_Analysis?f_ri=5109"},{"id":12428,"name":"Automatic Control","url":"https://www.academia.edu/Documents/in/Automatic_Control?f_ri=5109"},{"id":143038,"name":"Machine Learning and Pattern Recognition","url":"https://www.academia.edu/Documents/in/Machine_Learning_and_Pattern_Recognition?f_ri=5109"},{"id":2570288,"name":"Speech Understanding","url":"https://www.academia.edu/Documents/in/Speech_Understanding?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_9138366" data-work_id="9138366" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/9138366/A_Pattern_LAnguage_for_Systemic_Transformation_PLAST_re_Generative_of_Commons">A Pattern LAnguage for Systemic Transformation (PLAST) - (re)Generative of Commons </a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The purpose of this paper is to lay the ground for an open source pattern language for systemic transformation (PLAST) based on systemic interpretation. This pattern language will help change agents and practitioners on the ground make... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_9138366" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The purpose of this paper is to lay the ground for an open source pattern language for systemic transformation (PLAST) based on systemic interpretation. This pattern language will help change agents and practitioners on the ground make sense of complex systemic phenomena and dynamics so they can build truly transformative solutions and create greater coherence between disparate actions, thus leveraging and catalyzing agency and capacity for change wherever it may be found. <br /> <br />The end goal is to accelerate the transition to a sustainable and thrivable world, through the awareness and fostering of sustainable socio-economic dynamics regenerative of commons. Commons are understood here as the distributed factors of opportunity and renewal of the system, which need to be perpetually maintained to ensure the on-going sustainability and thrivability of the system and its components. 
<br /> <br />Paper prepared for the PurplSoc Workshop - Pursuit of Pattern Languages for Societal Change, at Danube-University, November 14/15, 2014</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/9138366" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="56a09451c3e53c2d157c840fcf22a10a" rel="nofollow" data-download="{&quot;attachment_id&quot;:35426897,&quot;asset_id&quot;:9138366,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/35426897/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="4599640" href="https://hull.academia.edu/HeleneFinidori">Helene Finidori</a><script data-card-contents-for-user="4599640" type="text/json">{"id":4599640,"first_name":"Helene","last_name":"Finidori","domain_name":"hull","page_name":"HeleneFinidori","display_name":"Helene Finidori","profile_url":"https://hull.academia.edu/HeleneFinidori?f_ri=5109","photo":"https://gravatar.com/avatar/fb9de05793eb6b32c5e94cdfecd0a738?s=65"}</script></span></span></li><li class="js-paper-rank-work_9138366 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="9138366"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 9138366, container: ".js-paper-rank-work_9138366", }); });</script></li><li class="js-percentile-work_9138366 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 9138366; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_9138366"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_9138366 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="9138366"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 9138366; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " 
+ window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=9138366]").text(description); $(".js-view-count-work_9138366").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_9138366").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="9138366"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="3155" rel="nofollow" href="https://www.academia.edu/Documents/in/Complexity_Theory">Complexity Theory</a>,&nbsp;<script data-card-contents-for-ri="3155" type="text/json">{"id":3155,"name":"Complexity Theory","url":"https://www.academia.edu/Documents/in/Complexity_Theory?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="15348" rel="nofollow" href="https://www.academia.edu/Documents/in/Hermeneutic_Phenomenology">Hermeneutic Phenomenology</a>,&nbsp;<script data-card-contents-for-ri="15348" type="text/json">{"id":15348,"name":"Hermeneutic Phenomenology","url":"https://www.academia.edu/Documents/in/Hermeneutic_Phenomenology?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="37903" rel="nofollow" href="https://www.academia.edu/Documents/in/Visual_Language">Visual Language</a><script data-card-contents-for-ri="37903" type="text/json">{"id":37903,"name":"Visual Language","url":"https://www.academia.edu/Documents/in/Visual_Language?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=9138366]'), work: {"id":9138366,"title":"A Pattern LAnguage for Systemic Transformation (PLAST) - (re)Generative of Commons ","created_at":"2014-11-05T02:20:00.865-08:00","url":"https://www.academia.edu/9138366/A_Pattern_LAnguage_for_Systemic_Transformation_PLAST_re_Generative_of_Commons?f_ri=5109","dom_id":"work_9138366","summary":"The purpose of this paper is to lay the ground for an open source pattern language for systemic transformation (PLAST) based on systemic interpretation. This pattern language will help change agents and practitioners on the ground make sense of complex systemic phenomena and dynamics so they can build truly transformative solutions and create greater coherence between disparate actions, thus leveraging and catalyzing agency and capacity for change wherever it may be found.\r\n\r\nThe end goal is to accelerate the transition to a sustainable and thrivable world, through the awareness and fostering of sustainable socio-economic dynamics regenerative of commons. 

CALL FOR PAPERS - 9th International Conference of Artificial Intelligence and Fuzzy Logic (AI & FL 2021)
by International Journal of Information Technology Convergence and services (IJITCS)

The 9th International Conference of Artificial Intelligence and Fuzzy Logic (AI & FL 2021) provides a forum for researchers in these areas to present their work in a peer-reviewed setting. Authors are solicited to contribute to the conference by submitting articles that illustrate research results, projects, surveying works and industrial experiences describing significant advances in the areas of Artificial Intelligence and its applications; the listed topics are indicative rather than exhaustive.

Topics: Bioinformatics, Programming Languages, Information Retrieval, Artificial Intelligence
data-work_id="63063976" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/63063976/Digital_Image_Based_Tree_Measurement_for_Forest_Inventory">Digital Image Based Tree Measurement for Forest Inventory</a></div></div><div class="u-pb4x u-mt3x"></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/63063976" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="6611251ef65066f8164a01a12a74098e" rel="nofollow" data-download="{&quot;attachment_id&quot;:75615176,&quot;asset_id&quot;:63063976,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/75615176/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="37555394" rel="nofollow" href="https://independent.academia.edu/JukkaHeikkonen">Jukka Heikkonen</a><script data-card-contents-for-user="37555394" type="text/json">{"id":37555394,"first_name":"Jukka","last_name":"Heikkonen","domain_name":"independent","page_name":"JukkaHeikkonen","display_name":"Jukka Heikkonen","profile_url":"https://independent.academia.edu/JukkaHeikkonen?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_63063976 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="63063976"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 63063976, container: ".js-paper-rank-work_63063976", }); });</script></li><li class="js-percentile-work_63063976 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 63063976; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_63063976"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_63063976 InlineList-item InlineList-item--bordered hidden"><div><span><span 
class="js-view-count view-count u-mr2x" data-work-id="63063976"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 63063976; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=63063976]").text(description); $(".js-view-count-work_63063976").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_63063976").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="63063976"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">15</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="93217" rel="nofollow" href="https://www.academia.edu/Documents/in/Segmentation">Segmentation</a>,&nbsp;<script data-card-contents-for-ri="93217" type="text/json">{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="96893" rel="nofollow" href="https://www.academia.edu/Documents/in/Calibration">Calibration</a><script data-card-contents-for-ri="96893" type="text/json">{"id":96893,"name":"Calibration","url":"https://www.academia.edu/Documents/in/Calibration?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=63063976]'), work: {"id":63063976,"title":"Digital Image Based Tree Measurement for Forest Inventory","created_at":"2021-12-02T23:41:06.767-08:00","url":"https://www.academia.edu/63063976/Digital_Image_Based_Tree_Measurement_for_Forest_Inventory?f_ri=5109","dom_id":"work_63063976","summary":null,"downloadable_attachments":[{"id":75615176,"asset_id":63063976,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":37555394,"first_name":"Jukka","last_name":"Heikkonen","domain_name":"independent","page_name":"JukkaHeikkonen","display_name":"Jukka Heikkonen","profile_url":"https://independent.academia.edu/JukkaHeikkonen?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern 
Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=5109","nofollow":true},{"id":96893,"name":"Calibration","url":"https://www.academia.edu/Documents/in/Calibration?f_ri=5109","nofollow":true},{"id":99818,"name":"Camera Calibration","url":"https://www.academia.edu/Documents/in/Camera_Calibration?f_ri=5109"},{"id":117033,"name":"Forests","url":"https://www.academia.edu/Documents/in/Forests?f_ri=5109"},{"id":128803,"name":"Distortion","url":"https://www.academia.edu/Documents/in/Distortion?f_ri=5109"},{"id":167397,"name":"Image recognition","url":"https://www.academia.edu/Documents/in/Image_recognition?f_ri=5109"},{"id":540162,"name":"Forest Inventory","url":"https://www.academia.edu/Documents/in/Forest_Inventory?f_ri=5109"},{"id":661097,"name":"Scots Pine","url":"https://www.academia.edu/Documents/in/Scots_Pine?f_ri=5109"},{"id":907382,"name":"Digital Image","url":"https://www.academia.edu/Documents/in/Digital_Image?f_ri=5109"},{"id":1258317,"name":"Lens Distortion","url":"https://www.academia.edu/Documents/in/Lens_Distortion?f_ri=5109"},{"id":3122382,"name":"imaging system","url":"https://www.academia.edu/Documents/in/imaging_system?f_ri=5109"},{"id":3701731,"name":"Digital camera","url":"https://www.academia.edu/Documents/in/Digital_camera?f_ri=5109"},{"id":3749602,"name":"Distance Measure","url":"https://www.academia.edu/Documents/in/Distance_Measure?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_45055518" data-work_id="45055518" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/45055518/Top_10_Cited_Articles_of_AI_2021_International_Journal_of_Artificial_Intelligence_and_Applications_IJAIA_">Top 10 Cited Articles of AI 2021 - International Journal of Artificial Intelligence &amp; Applications (IJAIA)</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The International Journal of Artificial Intelligence &amp; Applications (IJAIA) is a bi monthly open access peer-reviewed journal that publishes articles which contribute new results in all areas of the Artificial Intelligence &amp; Applications... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_45055518" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The International Journal of Artificial Intelligence &amp; Applications (IJAIA) is a bi monthly open access peer-reviewed journal that publishes articles which contribute new results in all areas of the Artificial Intelligence &amp; Applications (IJAIA). It is an international journal intended for professionals and researchers in all fields of AI for researchers, programmers, and software and hardware manufacturers. 
The journal also aims to publish new attempts in the form of special issues on emerging areas in Artificial Intelligence and applications.<br /><br />Authors are solicited to contribute to the journal by submitting articles that illustrate research results, projects, surveying works and industrial experiences that describe significant advances in the areas of Artificial Intelligence &amp; applications.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/45055518" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="23d98ff6a0f2d6ede68589a5e3ec2afe" rel="nofollow" data-download="{&quot;attachment_id&quot;:65609819,&quot;asset_id&quot;:45055518,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/65609819/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="19082339" rel="nofollow" href="https://independent.academia.edu/IjaiaJournal">International Journal of Artificial Intelligence (IJAIA)</a><script data-card-contents-for-user="19082339" type="text/json">{"id":19082339,"first_name":"International Journal of Artificial Intelligence","last_name":"(IJAIA)","domain_name":"independent","page_name":"IjaiaJournal","display_name":"International Journal of Artificial Intelligence (IJAIA)","profile_url":"https://independent.academia.edu/IjaiaJournal?f_ri=5109","photo":"https://0.academia-photos.com/19082339/5301872/160061290/s65_international_journal_of_artificial_intelligence_applications._ijaia_.jpg"}</script></span></span></li><li class="js-paper-rank-work_45055518 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="45055518"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 45055518, container: ".js-paper-rank-work_45055518", }); });</script></li><li class="js-percentile-work_45055518 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 45055518; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_45055518"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); 
container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_45055518 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="45055518"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 45055518; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=45055518]").text(description); $(".js-view-count-work_45055518").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_45055518").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="45055518"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">20</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="77" rel="nofollow" href="https://www.academia.edu/Documents/in/Robotics">Robotics</a>,&nbsp;<script data-card-contents-for-ri="77" type="text/json">{"id":77,"name":"Robotics","url":"https://www.academia.edu/Documents/in/Robotics?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="146" rel="nofollow" href="https://www.academia.edu/Documents/in/Bioinformatics">Bioinformatics</a>,&nbsp;<script data-card-contents-for-ri="146" type="text/json">{"id":146,"name":"Bioinformatics","url":"https://www.academia.edu/Documents/in/Bioinformatics?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="451" rel="nofollow" href="https://www.academia.edu/Documents/in/Programming_Languages">Programming Languages</a>,&nbsp;<script data-card-contents-for-ri="451" type="text/json">{"id":451,"name":"Programming Languages","url":"https://www.academia.edu/Documents/in/Programming_Languages?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="464" rel="nofollow" href="https://www.academia.edu/Documents/in/Information_Retrieval">Information Retrieval</a><script data-card-contents-for-ri="464" type="text/json">{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=45055518]'), work: {"id":45055518,"title":"Top 10 Cited Articles of AI 2021 - International Journal of Artificial Intelligence \u0026 Applications (IJAIA)","created_at":"2021-02-04T21:16:24.116-08:00","url":"https://www.academia.edu/45055518/Top_10_Cited_Articles_of_AI_2021_International_Journal_of_Artificial_Intelligence_and_Applications_IJAIA_?f_ri=5109","dom_id":"work_45055518","summary":"The International Journal of Artificial Intelligence \u0026 Applications (IJAIA) is a bi monthly open access peer-reviewed journal that publishes articles which contribute new results in all areas of the Artificial Intelligence \u0026 Applications (IJAIA). It is an international journal intended for professionals and researchers in all fields of AI for researchers, programmers, and software and hardware manufacturers. 
The journal also aims to publish new attempts in the form of special issues on emerging areas in Artificial Intelligence and applications.\n\nAuthors are solicited to contribute to the journal by submitting articles that illustrate research results, projects, surveying works and industrial experiences that describe significant advances in the areas of Artificial Intelligence \u0026 applications.","downloadable_attachments":[{"id":65609819,"asset_id":45055518,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":19082339,"first_name":"International Journal of Artificial Intelligence","last_name":"(IJAIA)","domain_name":"independent","page_name":"IjaiaJournal","display_name":"International Journal of Artificial Intelligence (IJAIA)","profile_url":"https://independent.academia.edu/IjaiaJournal?f_ri=5109","photo":"https://0.academia-photos.com/19082339/5301872/160061290/s65_international_journal_of_artificial_intelligence_applications._ijaia_.jpg"}],"research_interests":[{"id":77,"name":"Robotics","url":"https://www.academia.edu/Documents/in/Robotics?f_ri=5109","nofollow":true},{"id":146,"name":"Bioinformatics","url":"https://www.academia.edu/Documents/in/Bioinformatics?f_ri=5109","nofollow":true},{"id":451,"name":"Programming Languages","url":"https://www.academia.edu/Documents/in/Programming_Languages?f_ri=5109","nofollow":true},{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109","nofollow":true},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence?f_ri=5109"},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109"},{"id":1432,"name":"Natural Language Processing","url":"https://www.academia.edu/Documents/in/Natural_Language_Processing?f_ri=5109"},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=5109"},{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=5109"},{"id":3414,"name":"Mechatronics","url":"https://www.academia.edu/Documents/in/Mechatronics?f_ri=5109"},{"id":3419,"name":"Multimedia","url":"https://www.academia.edu/Documents/in/Multimedia?f_ri=5109"},{"id":4165,"name":"Fuzzy Logic","url":"https://www.academia.edu/Documents/in/Fuzzy_Logic?f_ri=5109"},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109"},{"id":9134,"name":"Pervasive Computing","url":"https://www.academia.edu/Documents/in/Pervasive_Computing?f_ri=5109"},{"id":11598,"name":"Neural Networks","url":"https://www.academia.edu/Documents/in/Neural_Networks?f_ri=5109"},{"id":12428,"name":"Automatic Control","url":"https://www.academia.edu/Documents/in/Automatic_Control?f_ri=5109"},{"id":17167,"name":"Parallel Processing","url":"https://www.academia.edu/Documents/in/Parallel_Processing?f_ri=5109"},{"id":22615,"name":"Knowledge Representation","url":"https://www.academia.edu/Documents/in/Knowledge_Representation?f_ri=5109"},{"id":84577,"name":"Knowledge-Based Systems","url":"https://www.academia.edu/Documents/in/Knowledge-Based_Systems?f_ri=5109"},{"id":287095,"name":"Knowledge Representation and Reasoning","url":"https://www.academia.edu/Documents/in/Knowledge_Representation_and_Reasoning?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_51016843" 
data-work_id="51016843" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/51016843/Call_For_Papers_September_Issue_International_Journal_of_Artificial_Intelligence_and_Applications_IJAIA_">Call For Papers - September Issue - International Journal of Artificial Intelligence &amp; Applications (IJAIA)</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The International Journal of Artificial Intelligence &amp; Applications (IJAIA) is a bi monthly open access peer-reviewed journal that publishes articles which contribute new results in all areas of the Artificial Intelligence &amp; Applications... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_51016843" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The International Journal of Artificial Intelligence &amp; Applications (IJAIA) is a bi monthly open access peer-reviewed journal that publishes articles which contribute new results in all areas of the Artificial Intelligence &amp; Applications (IJAIA). It is an international journal intended for professionals and researchers in all fields of AI for researchers, programmers, and software and hardware manufacturers. The journal also aims to publish new attempts in the form of special issues on emerging areas in Artificial Intelligence and applications.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/51016843" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="5e65804c1c802372ae42a10318ed0dac" rel="nofollow" data-download="{&quot;attachment_id&quot;:90409784,&quot;asset_id&quot;:51016843,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/90409784/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="19082339" rel="nofollow" href="https://independent.academia.edu/IjaiaJournal">International Journal of Artificial Intelligence (IJAIA)</a><script data-card-contents-for-user="19082339" type="text/json">{"id":19082339,"first_name":"International Journal of Artificial Intelligence","last_name":"(IJAIA)","domain_name":"independent","page_name":"IjaiaJournal","display_name":"International Journal of 
Artificial Intelligence (IJAIA)","profile_url":"https://independent.academia.edu/IjaiaJournal?f_ri=5109","photo":"https://0.academia-photos.com/19082339/5301872/160061290/s65_international_journal_of_artificial_intelligence_applications._ijaia_.jpg"}</script></span></span></li><li class="js-paper-rank-work_51016843 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="51016843"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 51016843, container: ".js-paper-rank-work_51016843", }); });</script></li><li class="js-percentile-work_51016843 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 51016843; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_51016843"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_51016843 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="51016843"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 51016843; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=51016843]").text(description); $(".js-view-count-work_51016843").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_51016843").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="51016843"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">20</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="77" rel="nofollow" href="https://www.academia.edu/Documents/in/Robotics">Robotics</a>,&nbsp;<script data-card-contents-for-ri="77" type="text/json">{"id":77,"name":"Robotics","url":"https://www.academia.edu/Documents/in/Robotics?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="146" rel="nofollow" href="https://www.academia.edu/Documents/in/Bioinformatics">Bioinformatics</a>,&nbsp;<script data-card-contents-for-ri="146" type="text/json">{"id":146,"name":"Bioinformatics","url":"https://www.academia.edu/Documents/in/Bioinformatics?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="451" rel="nofollow" href="https://www.academia.edu/Documents/in/Programming_Languages">Programming 
Languages</a><script data-card-contents-for-ri="451" type="text/json">{"id":451,"name":"Programming Languages","url":"https://www.academia.edu/Documents/in/Programming_Languages?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=51016843]'), work: {"id":51016843,"title":"Call For Papers - September Issue - International Journal of Artificial Intelligence \u0026 Applications (IJAIA)","created_at":"2021-08-26T06:27:31.015-07:00","url":"https://www.academia.edu/51016843/Call_For_Papers_September_Issue_International_Journal_of_Artificial_Intelligence_and_Applications_IJAIA_?f_ri=5109","dom_id":"work_51016843","summary":"The International Journal of Artificial Intelligence \u0026 Applications (IJAIA) is a bi monthly open access peer-reviewed journal that publishes articles which contribute new results in all areas of the Artificial Intelligence \u0026 Applications (IJAIA). It is an international journal intended for professionals and researchers in all fields of AI for researchers, programmers, and software and hardware manufacturers. The journal also aims to publish new attempts in the form of special issues on emerging areas in Artificial Intelligence and applications.","downloadable_attachments":[{"id":90409784,"asset_id":51016843,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":19082339,"first_name":"International Journal of Artificial Intelligence","last_name":"(IJAIA)","domain_name":"independent","page_name":"IjaiaJournal","display_name":"International Journal of Artificial Intelligence (IJAIA)","profile_url":"https://independent.academia.edu/IjaiaJournal?f_ri=5109","photo":"https://0.academia-photos.com/19082339/5301872/160061290/s65_international_journal_of_artificial_intelligence_applications._ijaia_.jpg"}],"research_interests":[{"id":77,"name":"Robotics","url":"https://www.academia.edu/Documents/in/Robotics?f_ri=5109","nofollow":true},{"id":146,"name":"Bioinformatics","url":"https://www.academia.edu/Documents/in/Bioinformatics?f_ri=5109","nofollow":true},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true},{"id":451,"name":"Programming Languages","url":"https://www.academia.edu/Documents/in/Programming_Languages?f_ri=5109","nofollow":true},{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence?f_ri=5109"},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109"},{"id":1432,"name":"Natural Language Processing","url":"https://www.academia.edu/Documents/in/Natural_Language_Processing?f_ri=5109"},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=5109"},{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=5109"},{"id":3414,"name":"Mechatronics","url":"https://www.academia.edu/Documents/in/Mechatronics?f_ri=5109"},{"id":3419,"name":"Multimedia","url":"https://www.academia.edu/Documents/in/Multimedia?f_ri=5109"},{"id":4165,"name":"Fuzzy Logic","url":"https://www.academia.edu/Documents/in/Fuzzy_Logic?f_ri=5109"},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109"},{"id":6132,"name":"Soft 
Computing","url":"https://www.academia.edu/Documents/in/Soft_Computing?f_ri=5109"},{"id":10977,"name":"Intelligent Systems","url":"https://www.academia.edu/Documents/in/Intelligent_Systems?f_ri=5109"},{"id":11598,"name":"Neural Networks","url":"https://www.academia.edu/Documents/in/Neural_Networks?f_ri=5109"},{"id":12022,"name":"Numerical Analysis","url":"https://www.academia.edu/Documents/in/Numerical_Analysis?f_ri=5109"},{"id":12428,"name":"Automatic Control","url":"https://www.academia.edu/Documents/in/Automatic_Control?f_ri=5109"},{"id":54123,"name":"Artificial Neural Networks","url":"https://www.academia.edu/Documents/in/Artificial_Neural_Networks?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_60781699" data-work_id="60781699" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/60781699/High_order_pattern_discovery_from_discrete_valued_data">High-order pattern discovery from discrete-valued data</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">To uncover qualitative and quantitative patterns in a data set is a challenging task for research in the area of machine learning and data analysis. Due to the complexity of real-world data, high-order (polythetic) patterns or event... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_60781699" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">To uncover qualitative and quantitative patterns in a data set is a challenging task for research in the area of machine learning and data analysis. Due to the complexity of real-world data, high-order (polythetic) patterns or event associations, in addition to first-order class-dependent relationships, have to be acquired. Once the patterns of different orders are found, they should be represented in a form appropriate for further analysis and interpretation. In this paper, we propose a novel method to discover qualitative and quantitative patterns (or event associations) inherent in a data set. It uses the adjusted residual analysis in statistics to test the significance of the occurrence of a pattern candidate against its expectation. To avoid exhaustive search of all possible combinations of primary events, techniques of eliminating the impossible pattern candidates are developed. The detected patterns of different orders are then represented in an attributed hypergraph which is lucid for pattern interpretation and analysis. 
Test results on artificial and real-world data are discussed toward the end of the paper.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/60781699" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="8487008172b9dac0149b02cdad81715f" rel="nofollow" data-download="{&quot;attachment_id&quot;:74073043,&quot;asset_id&quot;:60781699,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/74073043/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="199832047" href="https://independent.academia.edu/AndrewWong220">Andrew Wong</a><script data-card-contents-for-user="199832047" type="text/json">{"id":199832047,"first_name":"Andrew","last_name":"Wong","domain_name":"independent","page_name":"AndrewWong220","display_name":"Andrew Wong","profile_url":"https://independent.academia.edu/AndrewWong220?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_60781699 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="60781699"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 60781699, container: ".js-paper-rank-work_60781699", }); });</script></li><li class="js-percentile-work_60781699 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 60781699; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_60781699"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_60781699 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="60781699"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 60781699; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); 
$(".js-view-count[data-work-id=60781699]").text(description); $(".js-view-count-work_60781699").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_60781699").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="60781699"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">11</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2616" rel="nofollow" href="https://www.academia.edu/Documents/in/Graph_Theory">Graph Theory</a>,&nbsp;<script data-card-contents-for-ri="2616" type="text/json">{"id":2616,"name":"Graph Theory","url":"https://www.academia.edu/Documents/in/Graph_Theory?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="4116" rel="nofollow" href="https://www.academia.edu/Documents/in/Motor_Learning">Motor Learning</a>,&nbsp;<script data-card-contents-for-ri="4116" type="text/json">{"id":4116,"name":"Motor Learning","url":"https://www.academia.edu/Documents/in/Motor_Learning?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="4205" rel="nofollow" href="https://www.academia.edu/Documents/in/Data_Analysis">Data Analysis</a><script data-card-contents-for-ri="4205" type="text/json">{"id":4205,"name":"Data Analysis","url":"https://www.academia.edu/Documents/in/Data_Analysis?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=60781699]'), work: {"id":60781699,"title":"High-order pattern discovery from discrete-valued data","created_at":"2021-11-02T02:40:47.169-07:00","url":"https://www.academia.edu/60781699/High_order_pattern_discovery_from_discrete_valued_data?f_ri=5109","dom_id":"work_60781699","summary":"To uncover qualitative and quantitative patterns in a data set is a challenging task for research in the area of machine learning and data analysis. Due to the complexity of real-world data, high-order (polythetic) patterns or event associations, in addition to first-order class-dependent relationships, have to be acquired. Once the patterns of different orders are found, they should be represented in a form appropriate for further analysis and interpretation. In this paper, we propose a novel method to discover qualitative and quantitative patterns (or event associations) inherent in a data set. It uses the adjusted residual analysis in statistics to test the significance of the occurrence of a pattern candidate against its expectation. To avoid exhaustive search of all possible combinations of primary events, techniques of eliminating the impossible pattern candidates are developed. The detected patterns of different orders are then represented in an attributed hypergraph which is lucid for pattern interpretation and analysis. 
Identification of Liquid-Liquid Flow Pattern in a Horizontal Pipe Using Artificial Neural Networks
by Gargi Das

Identification of the flow pattern during the simultaneous flow of two immiscible liquids requires knowledge of the flow rate of each fluid as well as of other physical parameters such as conduit inclination, pipe material, pipe diameter, oil viscosity, wetting characteristics of the pipe, design of the entry mixer, and fluid-fluid interfacial tension. This article presents an artificial neural ...

Topics: Engineering, Chemical Engineering, Liquid Crystals, Pattern Recognition, Heat Transfer, Mathematical Modelling, Neural Network, Experimental Research, Artificial Neural Networks, ANN, Interfacial Tension, Flow Regime, Regime transition, Flow Pattern, Flow Rate, Levenberg Marquardt, Artificial Neural Network, Chemical Engineering Communications, Horizontal, learning algorithm

Graph-based representations and techniques for image processing and image analysis
by Francesc Serratosa

Topics: Robot Vision, Pattern Recognition, Image Analysis, Image segmentation, Image Processing and Analysis, Object Recognition, Data Fusion, Color Image Segmentation, Active Vision, Pattern, Industrial Application, Graph Partitioning, Perceptual grouping, Greedy Algorithm, Electrical And Electronic Engineering, Vision system, Distance Measure, Autonomous mobile robot

Recognition","url":"https://www.academia.edu/Documents/in/Object_Recognition?f_ri=5109"},{"id":134703,"name":"Data Fusion","url":"https://www.academia.edu/Documents/in/Data_Fusion?f_ri=5109"},{"id":155938,"name":"Color Image Segmentation","url":"https://www.academia.edu/Documents/in/Color_Image_Segmentation?f_ri=5109"},{"id":187369,"name":"Active Vision","url":"https://www.academia.edu/Documents/in/Active_Vision?f_ri=5109"},{"id":213094,"name":"Pattern","url":"https://www.academia.edu/Documents/in/Pattern?f_ri=5109"},{"id":245964,"name":"Industrial Application","url":"https://www.academia.edu/Documents/in/Industrial_Application?f_ri=5109"},{"id":332654,"name":"Graph Partitioning","url":"https://www.academia.edu/Documents/in/Graph_Partitioning?f_ri=5109"},{"id":335981,"name":"Perceptual grouping","url":"https://www.academia.edu/Documents/in/Perceptual_grouping?f_ri=5109"},{"id":893452,"name":"Greedy Algorithm","url":"https://www.academia.edu/Documents/in/Greedy_Algorithm?f_ri=5109"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering?f_ri=5109"},{"id":3480612,"name":"Vision system","url":"https://www.academia.edu/Documents/in/Vision_system?f_ri=5109"},{"id":3749602,"name":"Distance Measure","url":"https://www.academia.edu/Documents/in/Distance_Measure?f_ri=5109"},{"id":4036488,"name":"Autonomous mobile robot","url":"https://www.academia.edu/Documents/in/Autonomous_mobile_robot?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_71406189" data-work_id="71406189" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/71406189/Facial_expression_recognition_through_pattern_analysis_of_facial_muscle_movements_utilizing_electromyogram_sensors">Facial expression recognition through pattern analysis of facial muscle movements utilizing electromyogram sensors</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Emotion recognition is one of the important highlights of human emotional intelligence and has long been studied to be incorporated with machine intelligence argued to make machines even more intelligent. This paper aims to contribute to... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_71406189" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Emotion recognition is one of the important highlights of human emotional intelligence and has long been studied to be incorporated with machine intelligence argued to make machines even more intelligent. This paper aims to contribute to this field of study by enabling machines to recognize emotion from facial electromyogram (EMG) signals. This includes a compilation of the groups attempt to recognize basic facial expressions namely happy, angry, and sad through the use of EMG signals from facial muscles. The group extracted features from the three EMG signals from the face of two human subjects, a male and a female, and analyzed these features to serve as feature templates. 
Using a minimum-distance classifier, recognition rates exceeded the target accuracy - 85 percent - reaching 94.44 percent for both the male and female subjects.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/71406189" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="168245105" href="https://independent.academia.edu/GraceBriones6">Grace Briones</a><script data-card-contents-for-user="168245105" type="text/json">{"id":168245105,"first_name":"Grace","last_name":"Briones","domain_name":"independent","page_name":"GraceBriones6","display_name":"Grace Briones","profile_url":"https://independent.academia.edu/GraceBriones6?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_71406189 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="71406189"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 71406189, container: ".js-paper-rank-work_71406189", }); });</script></li><li class="js-percentile-work_71406189 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 71406189; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_71406189"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_71406189 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="71406189"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 71406189; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=71406189]").text(description); $(".js-view-count-work_71406189").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_71406189").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="71406189"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">20</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" 
href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="465" rel="nofollow" href="https://www.academia.edu/Documents/in/Artificial_Intelligence">Artificial Intelligence</a>,&nbsp;<script data-card-contents-for-ri="465" type="text/json">{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2671" rel="nofollow" href="https://www.academia.edu/Documents/in/Emotional_intelligence">Emotional intelligence</a>,&nbsp;<script data-card-contents-for-ri="2671" type="text/json">{"id":2671,"name":"Emotional intelligence","url":"https://www.academia.edu/Documents/in/Emotional_intelligence?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="4331" rel="nofollow" href="https://www.academia.edu/Documents/in/Biosensors">Biosensors</a><script data-card-contents-for-ri="4331" type="text/json">{"id":4331,"name":"Biosensors","url":"https://www.academia.edu/Documents/in/Biosensors?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=71406189]'), work: {"id":71406189,"title":"Facial expression recognition through pattern analysis of facial muscle movements utilizing electromyogram sensors","created_at":"2022-02-13T18:54:30.581-08:00","url":"https://www.academia.edu/71406189/Facial_expression_recognition_through_pattern_analysis_of_facial_muscle_movements_utilizing_electromyogram_sensors?f_ri=5109","dom_id":"work_71406189","summary":"Emotion recognition is one of the important highlights of human emotional intelligence and has long been studied to be incorporated with machine intelligence argued to make machines even more intelligent. This paper aims to contribute to this field of study by enabling machines to recognize emotion from facial electromyogram (EMG) signals. This includes a compilation of the groups attempt to recognize basic facial expressions namely happy, angry, and sad through the use of EMG signals from facial muscles. The group extracted features from the three EMG signals from the face of two human subjects, a male and a female, and analyzed these features to serve as feature templates. 
Using a minimum-distance classifier, recognition rates exceeded the target accuracy - 85 percent - reaching 94.44 percent for both the male and female subjects.","downloadable_attachments":[],"ordered_authors":[{"id":168245105,"first_name":"Grace","last_name":"Briones","domain_name":"independent","page_name":"GraceBriones6","display_name":"Grace Briones","profile_url":"https://independent.academia.edu/GraceBriones6?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence?f_ri=5109","nofollow":true},{"id":2671,"name":"Emotional intelligence","url":"https://www.academia.edu/Documents/in/Emotional_intelligence?f_ri=5109","nofollow":true},{"id":4331,"name":"Biosensors","url":"https://www.academia.edu/Documents/in/Biosensors?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109"},{"id":5110,"name":"Face Recognition","url":"https://www.academia.edu/Documents/in/Face_Recognition?f_ri=5109"},{"id":9173,"name":"Biometrics","url":"https://www.academia.edu/Documents/in/Biometrics?f_ri=5109"},{"id":11749,"name":"Affective Computing","url":"https://www.academia.edu/Documents/in/Affective_Computing?f_ri=5109"},{"id":21269,"name":"Facial expression","url":"https://www.academia.edu/Documents/in/Facial_expression?f_ri=5109"},{"id":53179,"name":"Machine Intelligence","url":"https://www.academia.edu/Documents/in/Machine_Intelligence?f_ri=5109"},{"id":98327,"name":"Emotion Recognition","url":"https://www.academia.edu/Documents/in/Emotion_Recognition?f_ri=5109"},{"id":111436,"name":"IEEE","url":"https://www.academia.edu/Documents/in/IEEE?f_ri=5109"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction?f_ri=5109"},{"id":220049,"name":"Accuracy","url":"https://www.academia.edu/Documents/in/Accuracy?f_ri=5109"},{"id":627110,"name":"BioSensors","url":"https://www.academia.edu/Documents/in/BioSensors-1?f_ri=5109"},{"id":638808,"name":"Precision","url":"https://www.academia.edu/Documents/in/Precision?f_ri=5109"},{"id":979686,"name":"Human Subjects","url":"https://www.academia.edu/Documents/in/Human_Subjects?f_ri=5109"},{"id":1179384,"name":"Facial Expression Recognition","url":"https://www.academia.edu/Documents/in/Facial_Expression_Recognition?f_ri=5109"},{"id":1277387,"name":"Electromyogram","url":"https://www.academia.edu/Documents/in/Electromyogram?f_ri=5109"},{"id":2500540,"name":"Pattern analysis","url":"https://www.academia.edu/Documents/in/Pattern_analysis?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_68946197" data-work_id="68946197" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/68946197/Optical_scattering_by_biological_aerosols_experimental_and_computational_results_on_spore_simulants">Optical scattering by biological aerosols: experimental and computational results on spore simulants</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">We present both a 
Optical scattering by biological aerosols: experimental and computational results on spore simulants
by Rosalba Saija
We present both a computational and an experimental approach to the problem of biological aerosol characterization, joining the expertise gained in the field of theoretical optical scattering by complex, arbitrarily shaped particles (multipole expansion of the electromagnetic fields ...
Research interests: Optics, Pattern Recognition, Feature Extraction, Electromagnetic Field, Optical physics, Electrical and Electronic Engineering, transition matrix
KannadaPado: Mobile-based recognition and cross-lingual transcription of camera captured text in Kannada
by Sasidharan Ayyavu
We report our recent work on the recognition of scene text captured by mobile cameras, which we have named KannadaPado. The text region is currently cropped manually through a user-friendly interface, which permits repeated croppings from the captured image in a hierarchical fashion. The scene text segment is then binarized using the midline analysis and propagation for segmentation algorithm, and the segmented binary text image is recognized with the Lipi Gnani Kannada OCR. The recognized text can be transcribed into Roman, Devanagari, and the other principal Indian scripts. Such tools will be of immense use in metropolitan cities such as Bengaluru, letting business visitors and tourists read important textual information directly on their mobiles. The entire implementation has low computational complexity and hence runs fully on the mobile itself, without any backend computation. Text recognition accuracy is currently the bottleneck; once it is improved, the app will be immediately usable and will be made available to the public on the Google Play Store.
Research interests: Machine Learning, Pattern Recognition, Kannada, Tamil, Image segmentation, Translation, Digital Image Processing, Transcription, OCR, Mobile, Text Detection in Natural Scene, Transliteration, Devanagari, Indic Scripts, Image Binarization, Binarization, Grapheme to phoneme conversion
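The crop-binarize-recognize-transliterate flow described in this abstract can be sketched as below. This is only an illustration under stated assumptions: Otsu thresholding stands in for the midline analysis and propagation binarization used in the paper, and recognize_kannada / transliterate are hypothetical stubs for the Lipi Gnani OCR engine and the script converter, neither of which is a standard pip package.

```python
import cv2

def binarize(gray):
    # Placeholder: global Otsu thresholding; the paper's binarization is
    # midline analysis and propagation for segmentation instead.
    _, binary = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    return binary

def recognize_kannada(binary_image):
    # Hypothetical stub for the Lipi Gnani Kannada OCR engine.
    return "ಬೆಂಗಳೂರು"   # dummy output for illustration

def transliterate(kannada_text, target_script="Devanagari"):
    # Hypothetical stub for Kannada -> Roman/Devanagari transliteration.
    return "bengaluru"  # dummy output for illustration

def read_scene_text(image_path, crop_box):
    """Crop a user-selected region, binarize it, run OCR, and transliterate."""
    image = cv2.imread(image_path)
    x, y, w, h = crop_box                      # region chosen interactively by the user
    gray = cv2.cvtColor(image[y:y + h, x:x + w], cv2.COLOR_BGR2GRAY)
    text = recognize_kannada(binarize(gray))
    return text, transliterate(text)
```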
<a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_47212112" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Biometric based systems for individual authentication are increasingly becoming indispensable for protecting life and property. They provide ways for uniquely and reliably authenticating people, and are difficult to counterfeit. Biometric based authenticity systems are currently used in governmental, commercial and public sectors. However, these systems can be expensive to put in place and often impose physical constraint to the users. This paper introduces an inexpensive, powerful and easy to use hand geometry based biometric person authentication system using neural networks. The proposed approach followed to construct this system consists of an acquisition device, a pre-processing stage, and a neural network based classifier. One of the novelties of this work comprises on the introduction of hand geometry&#39;s related, position independent, feature extraction and identification which can be useful in problems related to image processing and pattern recognition. Another novelty of this research comprises on the use of error correction codes to enhance the level of performance of the neural network model. A dataset made of scanned images of the right hand of fifty different people was created for this study. Identification rates and Detection Cost Function (DCF) values obtained with the system were evaluated. Several strategies for coding the outputs of the neural networks were studied. Experimental results show that, when using Error Correction Output Codes (ECOC), up to 100% identification rates and 0% DCF can M. Faundez-Zanuy 123 202 M. Faundez-Zanuy et al.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/47212112" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="9cc89ecc7ec711eed7b9d404fddbdb48" rel="nofollow" data-download="{&quot;attachment_id&quot;:66430295,&quot;asset_id&quot;:47212112,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/66430295/download_file?st=MTc0MDA0MzU4MSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="4967260" href="https://tecnocampus.academia.edu/MarcosFaundezZanuy">Marcos Faundez-Zanuy</a><script data-card-contents-for-user="4967260" 
type="text/json">{"id":4967260,"first_name":"Marcos","last_name":"Faundez-Zanuy","domain_name":"tecnocampus","page_name":"MarcosFaundezZanuy","display_name":"Marcos Faundez-Zanuy","profile_url":"https://tecnocampus.academia.edu/MarcosFaundezZanuy?f_ri=5109","photo":"https://0.academia-photos.com/4967260/2150095/2523253/s65_marcos.faundez-zanuy.jpg"}</script></span></span></li><li class="js-paper-rank-work_47212112 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="47212112"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 47212112, container: ".js-paper-rank-work_47212112", }); });</script></li><li class="js-percentile-work_47212112 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 47212112; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_47212112"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_47212112 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="47212112"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 47212112; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=47212112]").text(description); $(".js-view-count-work_47212112").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_47212112").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="47212112"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">20</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="237" rel="nofollow" href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a>,&nbsp;<script data-card-contents-for-ri="237" type="text/json">{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="6177" rel="nofollow" href="https://www.academia.edu/Documents/in/Modeling">Modeling</a>,&nbsp;<script data-card-contents-for-ri="6177" type="text/json">{"id":6177,"name":"Modeling","url":"https://www.academia.edu/Documents/in/Modeling?f_ri=5109","nofollow":true}</script><a 
class="InlineList-item-text" data-has-card-for-ri="9173" rel="nofollow" href="https://www.academia.edu/Documents/in/Biometrics">Biometrics</a><script data-card-contents-for-ri="9173" type="text/json">{"id":9173,"name":"Biometrics","url":"https://www.academia.edu/Documents/in/Biometrics?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=47212112]'), work: {"id":47212112,"title":"Authentication of Individuals using Hand Geometry Biometrics: A Neural Network Approach","created_at":"2021-04-21T06:33:20.781-07:00","url":"https://www.academia.edu/47212112/Authentication_of_Individuals_using_Hand_Geometry_Biometrics_A_Neural_Network_Approach?f_ri=5109","dom_id":"work_47212112","summary":"Biometric based systems for individual authentication are increasingly becoming indispensable for protecting life and property. They provide ways for uniquely and reliably authenticating people, and are difficult to counterfeit. Biometric based authenticity systems are currently used in governmental, commercial and public sectors. However, these systems can be expensive to put in place and often impose physical constraint to the users. This paper introduces an inexpensive, powerful and easy to use hand geometry based biometric person authentication system using neural networks. The proposed approach followed to construct this system consists of an acquisition device, a pre-processing stage, and a neural network based classifier. One of the novelties of this work comprises on the introduction of hand geometry's related, position independent, feature extraction and identification which can be useful in problems related to image processing and pattern recognition. Another novelty of this research comprises on the use of error correction codes to enhance the level of performance of the neural network model. A dataset made of scanned images of the right hand of fifty different people was created for this study. Identification rates and Detection Cost Function (DCF) values obtained with the system were evaluated. Several strategies for coding the outputs of the neural networks were studied. Experimental results show that, when using Error Correction Output Codes (ECOC), up to 100% identification rates and 0% DCF can M. Faundez-Zanuy 123 202 M. 
Faundez-Zanuy et al.","downloadable_attachments":[{"id":66430295,"asset_id":47212112,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":4967260,"first_name":"Marcos","last_name":"Faundez-Zanuy","domain_name":"tecnocampus","page_name":"MarcosFaundezZanuy","display_name":"Marcos Faundez-Zanuy","profile_url":"https://tecnocampus.academia.edu/MarcosFaundezZanuy?f_ri=5109","photo":"https://0.academia-photos.com/4967260/2150095/2523253/s65_marcos.faundez-zanuy.jpg"}],"research_interests":[{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":6177,"name":"Modeling","url":"https://www.academia.edu/Documents/in/Modeling?f_ri=5109","nofollow":true},{"id":9173,"name":"Biometrics","url":"https://www.academia.edu/Documents/in/Biometrics?f_ri=5109","nofollow":true},{"id":16457,"name":"Public sector","url":"https://www.academia.edu/Documents/in/Public_sector?f_ri=5109"},{"id":26066,"name":"Neural Network","url":"https://www.academia.edu/Documents/in/Neural_Network?f_ri=5109"},{"id":148960,"name":"Fingerprint","url":"https://www.academia.edu/Documents/in/Fingerprint?f_ri=5109"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction?f_ri=5109"},{"id":167397,"name":"Image recognition","url":"https://www.academia.edu/Documents/in/Image_recognition?f_ri=5109"},{"id":328150,"name":"Associative Memory","url":"https://www.academia.edu/Documents/in/Associative_Memory?f_ri=5109"},{"id":684007,"name":"ORTHOGONALITY","url":"https://www.academia.edu/Documents/in/ORTHOGONALITY?f_ri=5109"},{"id":780669,"name":"Input Output","url":"https://www.academia.edu/Documents/in/Input_Output?f_ri=5109"},{"id":1274453,"name":"Right Handed","url":"https://www.academia.edu/Documents/in/Right_Handed?f_ri=5109"},{"id":1373317,"name":"Linear Independence","url":"https://www.academia.edu/Documents/in/Linear_Independence?f_ri=5109"},{"id":1633082,"name":"Hamming Distance","url":"https://www.academia.edu/Documents/in/Hamming_Distance?f_ri=5109"},{"id":1837730,"name":"Neural Network Model","url":"https://www.academia.edu/Documents/in/Neural_Network_Model?f_ri=5109"},{"id":2003495,"name":"Error Correction Code Words","url":"https://www.academia.edu/Documents/in/Error_Correction_Code_Words?f_ri=5109"},{"id":2295409,"name":"Error Correction Code","url":"https://www.academia.edu/Documents/in/Error_Correction_Code?f_ri=5109"},{"id":2364727,"name":"Hand Geometry","url":"https://www.academia.edu/Documents/in/Hand_Geometry?f_ri=5109"},{"id":3356996,"name":"neural processing","url":"https://www.academia.edu/Documents/in/neural_processing?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_35089998" data-work_id="35089998" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/35089998/Essence_of_kernel_Fisher_discriminant_KPCA_plus_LDA">Essence of kernel Fisher discriminant: KPCA plus LDA</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this paper, the method of kernel Fisher discriminant (KFD) is analyzed 
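The output-coding idea mentioned in this abstract, in which each enrolled identity is assigned a binary codeword, the network is trained to reproduce it, and decoding picks the codeword at minimum Hamming distance, can be sketched as follows. The codeword matrix and the simulated network outputs are invented for illustration and are not taken from the paper.

```python
import numpy as np

# Hypothetical 7-bit ECOC codewords for four enrolled users (rows = classes).
# In practice the codewords are chosen to maximize pairwise Hamming distance.
codewords = np.array([
    [0, 0, 0, 1, 1, 1, 1],
    [0, 1, 1, 0, 0, 1, 1],
    [1, 0, 1, 0, 1, 0, 1],
    [1, 1, 0, 1, 0, 0, 1],
])

def decode(network_outputs, codewords):
    """Threshold the network outputs and return the class whose codeword is
    closest in Hamming distance, so that a few flipped bits are corrected."""
    bits = (np.asarray(network_outputs) >= 0.5).astype(int)
    hamming = np.count_nonzero(codewords != bits, axis=1)
    return int(np.argmin(hamming))

# Simulated network output with one corrupted bit: still decodes to class 2.
outputs = [0.9, 0.2, 0.8, 0.1, 0.7, 0.6, 0.9]
print(decode(outputs, codewords))   # -> 2
```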
Essence of kernel Fisher discriminant: KPCA plus LDA
by Alejandro F Frangi
In this paper, the method of kernel Fisher discriminant (KFD) is analyzed and its nature is revealed: KFD is equivalent to kernel principal component analysis (KPCA) plus Fisher linear discriminant analysis (LDA). Based on this result, a more transparent KFD algorithm is proposed, in which KPCA is first performed and LDA is then used for a second feature extraction in the KPCA-transformed space. Finally, the effectiveness of the proposed algorithm is verified using the CENPARMI handwritten numeral database.
Research interests: Principal Component Analysis, Pattern Recognition, Feature Extraction, Kernel principal component analysis (KPCA), Electrical and Electronic Engineering
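The two-stage procedure described in this abstract (KPCA first, then LDA in the transformed space) maps directly onto standard library components. The sketch below uses scikit-learn on its bundled digits data as a stand-in for the CENPARMI handwritten numeral database; the kernel choice, component count, and gamma value are illustrative assumptions, not the authors' settings.

```python
from sklearn.datasets import load_digits
from sklearn.decomposition import KernelPCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline

X, y = load_digits(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Stage 1: kernel PCA lifts the data into a nonlinear feature space;
# Stage 2: Fisher LDA performs a second, discriminative feature extraction there.
kfd = make_pipeline(
    KernelPCA(n_components=60, kernel="rbf", gamma=1e-3),
    LinearDiscriminantAnalysis(),
)
kfd.fit(X_train, y_train)
print("test accuracy:", kfd.score(X_test, y_test))
```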
Computer-assisted structure-activity relationships of nitrogenous cyclic compounds tested in salmonella assays for mutagenicity
by Larry D Claxton
A study of the relationship between mutagenicity and molecular structure for a data set of nitrogenous cyclic compounds is reported. A computerized SAR system (ADAPT) was used to classify 114 nitrogenous cyclic compounds with 19 molecular descriptors, each of which represented at least 10% of the compounds in the data set. The average correct predictability over the database was calculated to be 89% after evaluating 100 training/prediction subsets. The actual predictive ability of the discriminants generated by the ADAPT system was demonstrated by predicting the mutagenicity of structurally similar compounds that were not in the data set: weight vectors generated by the pattern recognition programs were used to predict the bacterial mutagenicity of 10 such compounds, and all of them were predicted correctly, better than the 89% estimated for the system. This displayed the ability of the system to classify compo...
Research interests: Pattern Recognition, Software, Mutation, Salmonella Typhimurium, Bacteria, Ames Test, Molecular Conformation, Structure-activity Relationship, Heterocyclic compounds, Predictive value of tests, Amine
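The prediction step described here, a learned weight vector applied to a vector of molecular descriptors with the sign of the dot product giving the mutagenic/non-mutagenic call, can be illustrated with a simple linear discriminant. The descriptor values and the perceptron-style training loop below are invented for illustration; they stand in for, and are not, the ADAPT system.

```python
import numpy as np

rng = np.random.default_rng(0)

# Hypothetical descriptor matrix: rows = compounds, columns = molecular
# descriptors (counts, connectivity indices, ...); labels: +1 mutagenic, -1 not.
X = rng.normal(size=(114, 19))
w_true = rng.normal(size=19)
y = np.sign(X @ w_true)

# Learn a discriminant weight vector from the descriptors (perceptron updates).
w = np.zeros(19)
for _ in range(50):                      # a few passes over the training set
    for xi, yi in zip(X, y):
        if yi * (xi @ w) <= 0:           # misclassified -> update the weights
            w += yi * xi

# Predict the mutagenicity of a compound outside the training set.
x_new = rng.normal(size=19)
print("predicted class:", int(np.sign(x_new @ w)))
```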

Combining Multi-scale Character Recognition and Linguistic Knowledge for Natural Scene Text OCR
by Pascale Sebillot
Understanding text captured in real-world scenes is a challenging problem in the field of visual pattern recognition and continues to generate significant interest in the OCR (Optical Character Recognition) community. This paper proposes a novel method to recognize scene text while avoiding the conventional character segmentation step. The idea is to scan the text image with multi-scale windows and apply a robust recognition model, relying on a neural classification approach, to every window in order to recognize valid characters and identify invalid ones. Recognition results are represented as a graph model in order to determine the best sequence of characters. Linguistic knowledge is also incorporated to remove errors due to recognition confusions. The method is evaluated on the ICDAR 2003 database of scene text images and outperforms state-of-the-art approaches.
Topics: Pattern Recognition, Character Recognition, Neural Network, Design method
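
A rough sketch of the sliding-window idea described above, assuming a hypothetical char_classifier(window) that returns a (character, score) pair; here hypotheses are combined with a simple greedy left-to-right pass rather than the paper's graph model and linguistic knowledge:

import numpy as np

def char_classifier(window):
    # Stand-in for the paper's neural character recognizer: returns the
    # most likely character and a confidence score for an image window.
    # It is a random stub so the sketch runs end to end.
    rng = np.random.default_rng(abs(hash(window.tobytes())) % (2**32))
    chars = list("abcdefghijklmnopqrstuvwxyz")
    idx = rng.integers(len(chars))
    return chars[idx], float(rng.uniform(0.2, 1.0))

def recognize_line(image, window_widths=(12, 16, 24), step=4):
    """Scan a text-line image with windows of several widths and keep,
    for each horizontal position, the best-scoring character hypothesis."""
    h, w = image.shape
    hypotheses = []  # (x_start, x_end, char, score)
    for width in window_widths:
        for x in range(0, w - width + 1, step):
            ch, score = char_classifier(image[:, x:x + width])
            hypotheses.append((x, x + width, ch, score))
    # Greedy selection of non-overlapping, confident windows; the paper
    # instead searches a hypothesis graph using linguistic knowledge.
    hypotheses.sort(key=lambda hyp: (hyp[0], -hyp[3]))
    text, cursor = [], 0
    for x0, x1, ch, score in hypotheses:
        if x0 >= cursor and score > 0.5:
            text.append(ch)
            cursor = x1
    return "".join(text)

print(recognize_line(np.zeros((32, 120), dtype=np.uint8)))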

Expression glasses: a wearable device for facial expression recognition
by Raul Fernandez
Expression Glasses provide a wearable "appliance-based" alternative to general-purpose machine vision face recognition systems. The glasses sense facial muscle movements, and use pattern recognition to identify meaningful expressions such as confusion or interest. A prototype of the glasses has been built and evaluated. The prototype uses piezoelectric sensors hidden in a visor extension to a pair of glasses, providing for compactness, user control, and anonymity. On users who received no training or feedback, the glasses initially performed at 94% accuracy in detecting an expression, and at 74% accuracy in recognizing whether the expression was confusion or interest. Significant improvement beyond these numbers appears to be possible with extended use, and with a small amount of feedback (letting the user see the output of the system).
Topics: Human Computer Interaction, Wearable Computing, Pattern Recognition, Face Recognition

Are Subitizing and Counting Implemented as Separate or Functionally Overlapping Processes?
by Brian Butterworth
Topics: Mathematics, Cognition, Pattern Recognition, Language

Estimating the number of clusters in a numerical data set via quantization error modeling
by T. Kauranne and Alexander Kolesnikov
In this paper, we consider the problem of unsupervised clustering (vector quantization) of multidimensional numerical data. We propose a new method for determining an optimal number of clusters in the data set. The method is based on parametric modeling of the quantization error. The model parameter can be treated as the effective dimensionality of the data set. The proposed method was tested with artificial and real numerical data sets, and the results of the experiments demonstrate empirically not only the effectiveness of the method but also its ability to cope with difficult cases where other known methods fail.
Topics: Pattern Recognition, Electrical And Electronic Engineering
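
A minimal illustration of the general idea of choosing the number of clusters from the behavior of the quantization error; this uses a crude elbow-style criterion on the k-means error curve, not the paper's parametric error model:

import numpy as np
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs

# Synthetic data with a known number of clusters for the demonstration.
X, _ = make_blobs(n_samples=600, centers=4, cluster_std=1.0, random_state=0)

# Quantization (within-cluster squared) error as a function of k.
ks = range(1, 11)
errors = [KMeans(n_clusters=k, n_init=10, random_state=0).fit(X).inertia_
          for k in ks]

# Crude elbow criterion: pick the k after which the relative error drop
# levels off.  The paper fits a parametric model to this curve instead.
drops = np.diff(errors) / errors[:-1]
k_hat = int(np.argmax(drops > -0.15)) + 1
print("estimated number of clusters:", k_hat)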

Handwritten Isolated Digit Recognition Using Artificial Neural Networks
by AR Publication
A new mechanism proposes the use of Artificial Neural Network (ANN) models for recognizing handwritten isolated digits, and also presents new techniques for extracting digits from scanned digit images. The proposed methods remove the slope and slant from handwritten isolated digits and normalize the size of the digit images. Experiments have been conducted on handwritten isolated digits, achieving recognition rates of up to 99%.
Topics: Artificial Intelligence, Image Processing, Machine Learning, Pattern Recognition
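
A small, self-contained sketch of digit classification with a feed-forward neural network, using scikit-learn's bundled 8x8 digit images rather than the paper's own data and preprocessing:

from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier

# 8x8 grayscale digit images flattened to 64 features each.
digits = load_digits()
X_train, X_test, y_train, y_test = train_test_split(
    digits.data, digits.target, test_size=0.25, random_state=0)

# One hidden layer of 64 units; the paper's slant removal and size
# normalization steps are not reproduced here.
clf = MLPClassifier(hidden_layer_sizes=(64,), max_iter=500, random_state=0)
clf.fit(X_train, y_train)
print("test accuracy:", clf.score(X_test, y_test))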

Prediction of apatite lattice constants from their constituent elemental radii and artificial intelligence methods
by YZ Zeng
Apatite lattice constants (LCs) are predicted from their constituent elemental ionic radii, using pattern recognition (PR) and artificial neural network (ANN) techniques. In particular, a PR study demonstrates that ionic radii predominantly govern the LCs of apatites. Furthermore, using ANN techniques, prediction models of LCs a and c are developed, which reproduce the measured LCs well (R² = 0.98). All the literature reported on 30 pure and 22 mixed apatite compounds is collected and used in the present work. LCs of all 66 possible new apatites (assuming they exist) are estimated by the developed ANN models. These proposed new apatites may be of interest to biomedical research, especially in the design of new apatite biomaterials for bone remodeling. Similarly, these techniques may also be applied in the study of interface growth behaviors involving other biomaterials.
Topics: Algorithms, Artificial Intelligence, Materials Science, Crystallography
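
A hedged sketch of the kind of model described: a small neural-network regressor mapping ionic radii to the two lattice constants, trained here on synthetic stand-in data (the paper's 52 apatite compounds and descriptor choices are not reproduced):

import numpy as np
from sklearn.neural_network import MLPRegressor

rng = np.random.default_rng(0)

# Stand-in features: ionic radii (in angstroms) for three lattice sites of
# hypothetical apatite compositions; purely illustrative values.
radii = rng.uniform(0.6, 1.6, size=(200, 3))

# Stand-in targets: lattice constants a and c generated from a made-up
# linear relation plus noise, only so the sketch has something to fit.
a_const = 9.4 + radii @ np.array([0.3, 0.2, 0.1]) + rng.normal(0, 0.02, 200)
c_const = 6.9 + radii @ np.array([0.1, 0.3, 0.2]) + rng.normal(0, 0.02, 200)
targets = np.column_stack([a_const, c_const])

model = MLPRegressor(hidden_layer_sizes=(16,), max_iter=2000, random_state=0)
model.fit(radii, targets)
print("R^2 on the synthetic data:", model.score(radii, targets))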

Characterization of complex networks: A survey of measurements
by Francisco Rodrigues
Each complex network (or class of networks) presents specific topological features which characterize its connectivity and highly influence the dynamics of processes executed on the network. The analysis, discrimination, and synthesis of complex networks therefore rely on the use of measurements capable of expressing the most relevant topological features. This article presents a survey of such measurements. It includes general considerations about complex network characterization, a brief review of the principal models, and the ...
Topics: Physics, Condensed Matter Physics, Quantum Physics, Multivariate Statistics
The analysis, discrimination, and synthesis of complex networks therefore rely on the use of measurements capable of expressing the most relevant topological features. This article presents a survey of such measurements. It includes general considerations about complex network characterization, a brief review of the principal models, and the ...","downloadable_attachments":[{"id":82440644,"asset_id":74209858,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":206086249,"first_name":"Francisco","last_name":"Rodrigues","domain_name":"independent","page_name":"FranciscoRodrigues242","display_name":"Francisco Rodrigues","profile_url":"https://independent.academia.edu/FranciscoRodrigues242?f_ri=5109","photo":"https://0.academia-photos.com/206086249/66555287/54909086/s65_francisco.rodrigues.png"}],"research_interests":[{"id":498,"name":"Physics","url":"https://www.academia.edu/Documents/in/Physics?f_ri=5109","nofollow":true},{"id":505,"name":"Condensed Matter Physics","url":"https://www.academia.edu/Documents/in/Condensed_Matter_Physics?f_ri=5109","nofollow":true},{"id":518,"name":"Quantum Physics","url":"https://www.academia.edu/Documents/in/Quantum_Physics?f_ri=5109","nofollow":true},{"id":1352,"name":"Multivariate Statistics","url":"https://www.academia.edu/Documents/in/Multivariate_Statistics?f_ri=5109","nofollow":true},{"id":2616,"name":"Graph Theory","url":"https://www.academia.edu/Documents/in/Graph_Theory?f_ri=5109"},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109"},{"id":41482,"name":"Multivariate Analysis","url":"https://www.academia.edu/Documents/in/Multivariate_Analysis?f_ri=5109"},{"id":43619,"name":"Feature Selection","url":"https://www.academia.edu/Documents/in/Feature_Selection?f_ri=5109"},{"id":54501,"name":"Complex System","url":"https://www.academia.edu/Documents/in/Complex_System?f_ri=5109"},{"id":99499,"name":"Complex network","url":"https://www.academia.edu/Documents/in/Complex_network?f_ri=5109"},{"id":210992,"name":"Vertex","url":"https://www.academia.edu/Documents/in/Vertex?f_ri=5109"},{"id":408186,"name":"Perturbation Analysis","url":"https://www.academia.edu/Documents/in/Perturbation_Analysis?f_ri=5109"},{"id":613465,"name":"Statistical models","url":"https://www.academia.edu/Documents/in/Statistical_models?f_ri=5109"},{"id":890611,"name":"Fractal Dimension","url":"https://www.academia.edu/Documents/in/Fractal_Dimension?f_ri=5109"},{"id":1463317,"name":"Contents","url":"https://www.academia.edu/Documents/in/Contents?f_ri=5109"},{"id":1600931,"name":"Perturbation Theory","url":"https://www.academia.edu/Documents/in/Perturbation_Theory?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_73958945" data-work_id="73958945" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/73958945/Quadratically_constrained_quadratic_programs_on_acyclic_graphs_with_application_to_power_flow">Quadratically constrained quadratic programs on acyclic graphs with application to power flow</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Abstract: This paper proves that non-convex quadratically constrained quadratic programs have an exact 
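Purely as an illustration of the kind of topological measurements such a survey covers (not code from the article), the sketch below computes a few standard measurements with networkx on a toy random graph; the graph model, its size, and the particular measurements chosen are assumptions made for this example.

    # Illustrative only: a few topological measurements of the kind surveyed,
    # computed with networkx on a toy Erdos-Renyi graph (an assumption).
    import networkx as nx

    G = nx.erdos_renyi_graph(n=200, p=0.05, seed=42)

    measurements = {
        "mean degree": sum(dict(G.degree()).values()) / G.number_of_nodes(),
        "average clustering coefficient": nx.average_clustering(G),
        "degree assortativity": nx.degree_assortativity_coefficient(G),
    }
    # Shortest-path measures are only defined within a connected component.
    giant = G.subgraph(max(nx.connected_components(G), key=len))
    measurements["average shortest path (giant component)"] = nx.average_shortest_path_length(giant)

    for name, value in measurements.items():
        print(f"{name}: {value:.3f}")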
Quadratically constrained quadratic programs on acyclic graphs with application to power flow
by deepak kumar
https://www.academia.edu/73958945/Quadratically_constrained_quadratic_programs_on_acyclic_graphs_with_application_to_power_flow
This paper proves that non-convex quadratically constrained quadratic programs have an exact semidefinite relaxation when their underlying graph is acyclic, provided the constraint set satisfies a certain technical condition. When the condition is not satisfied, we propose a heuristic to obtain a feasible point starting from a solution of the relaxed problem. These methods are then demonstrated to provide exact solutions to a richer class of optimal power flow problems than previously solved.
Research interests: Mathematics, Functional Analysis, Computer Science, Machine Learning, Optimization (Mathematics), Pattern Recognition, Optimization techniques, Neural Networks, Particle Swarm Optimization, Optimization, Optimization Problem, Interior Point Methods, Kernel Regression, ICANN, Reproducing Kernel Hilbert Space, Index Terms-, Quadratic Programming, Structural model, Regression estimator
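As a hedged illustration of the technique named in the abstract, the sketch below builds the standard semidefinite relaxation of a small non-convex QCQP with cvxpy, replacing the rank-one matrix X = x x' with a positive semidefinite variable. The toy objective, single constraint, and normalization are made-up assumptions, not the paper's optimal power flow formulation.

    # Illustrative only: SDP relaxation of a toy non-convex QCQP
    #   minimize x' C x  subject to  x' A x <= b,  ||x||^2 = 1.
    import numpy as np
    import cvxpy as cp

    rng = np.random.default_rng(0)
    n = 4
    C = rng.standard_normal((n, n)); C = C + C.T   # symmetric objective matrix (made up)
    A = rng.standard_normal((n, n)); A = A + A.T   # symmetric constraint matrix (made up)
    b = 1.0

    X = cp.Variable((n, n), symmetric=True)
    constraints = [X >> 0, cp.trace(A @ X) <= b, cp.trace(X) == 1]
    prob = cp.Problem(cp.Minimize(cp.trace(C @ X)), constraints)
    prob.solve()

    # If the optimal X is (numerically) rank one, the relaxation is exact and the
    # leading eigenvector recovers a feasible x for the original problem.
    eigvals, eigvecs = np.linalg.eigh(X.value)
    x_hat = np.sqrt(max(eigvals[-1], 0.0)) * eigvecs[:, -1]
    print("relaxation value:", prob.value)
    print("eigenvalues of X:", np.round(eigvals, 4))
    print("candidate x:", np.round(x_hat, 4))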
Ecosystem mapping at the African continent scale using a hybrid clustering approach based on 1-km resolution multi-annual data from SPOT/VEGETATION
by Jean-Louis Roujean
https://www.academia.edu/73490504/Ecosystem_mapping_at_the_African_continent_scale_using_a_hybrid_clustering_approach_based_on_1_km_resolution_multi_annual_data_from_SPOT_VEGETATION
The goal of this study is to propose a new classification of African ecosystems based on an 8-year analysis of Normalized Difference Vegetation Index (NDVI) data sets from SPOT/VEGETATION. We develop two methods of classification. The first method is obtained from a k-nearest neighbour (k-NN) classifier, which represents a simple machine learning algorithm in pattern recognition. The second method is hybrid in that it combines k-NN clustering, hierarchical principles and the Fast Fourier Transform (FFT). The nomenclature of the two classifications relies on three levels of vegetation structural categories based on the Land Cover Classification System (LCCS). The two main outcomes are: (i) the delineation of the spatial distribution of ecosystems into five bioclimatic ecoregions at the African continental scale; (ii) two ecosystem maps made sequentially: an initial map with 92 ecosystems from the k-NN, plus a deduced hybrid classification with 73 classes, which better reflects the bio-geographical patterns. The inclusion of bioclimatic information and successive k-NN clustering elements helps to enhance the discrimination of ecosystems. Adopting this hybrid approach makes the ecosystem identification and labelling more flexible and more accurate in comparison to straightforward methods of classification. The validation of the hybrid classification, conducted by cross-comparisons with validated continental maps, showed a mapping accuracy of 54% to 61%.
Research interests: Cartography, Artificial Intelligence, Machine Learning, Pattern Recognition, Analysis, Nomenclature, Hybrid, Classification, Ecosystems, Method, Data, Ndvi, Geomatic Engineering, Hybrid Approach, Fast Fourier Transform, Classifier, Fourier Transformation, K Nearest Neighbour, Hybrid System, Normalized Difference Vegetation Index
Pattern Recognition Using Neural Networks
by Dr. Shamla Mantri
https://www.academia.edu/70779232/Pattern_Recognition_Using_Neural_Networks
Face recognition has been identified as one of the most attractive research areas and has drawn the attention of many researchers due to its varied applications such as security systems, medical systems, and entertainment. Face recognition is the preferred mode of identification by humans: it is natural, robust and non-intrusive. A wide variety of systems require reliable personal recognition schemes to either confirm or determine the identity of an individual requesting their services. The purpose of such schemes is to ensure that the rendered services are accessed only by a legitimate user and no one else. Examples of such applications include secure access to buildings, computer systems, laptops, cellular phones, and ATMs. In the absence of robust personal recognition schemes, these systems are vulnerable to the wiles of an impostor. In this paper we develop and illustrate a recognition system for human faces using a novel Kohonen self-organizing map (SOM) or Self-Organi...
Research interests: Computer Science, Pattern Recognition, Neural Network, Principal component analysis (PCA), Computer Science Information Technology
...</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/65988426" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="588ba2622e42e8130eaba47c0608e664" rel="nofollow" data-download="{&quot;attachment_id&quot;:77353027,&quot;asset_id&quot;:65988426,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/77353027/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="9617725" href="https://unt.academia.edu/KameshNamuduri">Kamesh Namuduri</a><script data-card-contents-for-user="9617725" type="text/json">{"id":9617725,"first_name":"Kamesh","last_name":"Namuduri","domain_name":"unt","page_name":"KameshNamuduri","display_name":"Kamesh Namuduri","profile_url":"https://unt.academia.edu/KameshNamuduri?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_65988426 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="65988426"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 65988426, container: ".js-paper-rank-work_65988426", }); });</script></li><li class="js-percentile-work_65988426 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 65988426; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_65988426"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_65988426 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="65988426"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 65988426; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=65988426]").text(description); $(".js-view-count-work_65988426").attr('title', description).tooltip(); }); 
});</script></span><script>$(function() { $(".js-view-count-work_65988426").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="65988426"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">14</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="300" rel="nofollow" href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a>,&nbsp;<script data-card-contents-for-ri="300" type="text/json">{"id":300,"name":"Mathematics","url":"https://www.academia.edu/Documents/in/Mathematics?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=65988426]'), work: {"id":65988426,"title":"Gabor filter-based edge detection","created_at":"2021-12-25T21:07:28.646-08:00","url":"https://www.academia.edu/65988426/Gabor_filter_based_edge_detection?f_ri=5109","dom_id":"work_65988426","summary":"It is common practice to utilize evidence from biological and psychological vision experiments to develop computational models for low-level feature extraction. The receptive profiles of simple cells in mammalian visual systems have been found to closely resemble Gabor filters. 
...","downloadable_attachments":[{"id":77353027,"asset_id":65988426,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":9617725,"first_name":"Kamesh","last_name":"Namuduri","domain_name":"unt","page_name":"KameshNamuduri","display_name":"Kamesh Namuduri","profile_url":"https://unt.academia.edu/KameshNamuduri?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":300,"name":"Mathematics","url":"https://www.academia.edu/Documents/in/Mathematics?f_ri=5109","nofollow":true},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109"},{"id":6177,"name":"Modeling","url":"https://www.academia.edu/Documents/in/Modeling?f_ri=5109"},{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=5109"},{"id":68431,"name":"Noise","url":"https://www.academia.edu/Documents/in/Noise?f_ri=5109"},{"id":81788,"name":"Edge Detection","url":"https://www.academia.edu/Documents/in/Edge_Detection?f_ri=5109"},{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=5109"},{"id":178021,"name":"Filter","url":"https://www.academia.edu/Documents/in/Filter?f_ri=5109"},{"id":329007,"name":"Filter Design","url":"https://www.academia.edu/Documents/in/Filter_Design?f_ri=5109"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering?f_ri=5109"},{"id":1569002,"name":"Gabor Filter","url":"https://www.academia.edu/Documents/in/Gabor_Filter?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_65211614" data-work_id="65211614" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/65211614/Table_structure_understanding_and_its_performance_evaluation">Table structure understanding and its performance evaluation</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">This paper presents a table structure understanding algorithm designed using optimization methods. The algorithm is probability based, where the probabilities are estimated from geometric measurements made on the various entities in a... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_65211614" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">This paper presents a table structure understanding algorithm designed using optimization methods. The algorithm is probability based, where the probabilities are estimated from geometric measurements made on the various entities in a large training set. The methodology includes a global parameter optimization scheme, a novel automatic table ground truth generation system and a table structure understanding performance evaluation protocol. 
Table structure understanding and its performance evaluation
by robert haralick
https://www.academia.edu/65211614/Table_structure_understanding_and_its_performance_evaluation
This paper presents a table structure understanding algorithm designed using optimization methods. The algorithm is probability based, where the probabilities are estimated from geometric measurements made on the various entities in a large training set. The methodology includes a global parameter optimization scheme, a novel automatic table ground truth generation system and a table structure understanding performance evaluation protocol. On a document data set with 518 table entities and 10,934 cell entities, the algorithm achieved a 96.76% accuracy rate at the cell level and a 98.32% accuracy rate at the table level.
Research interests: Computer Science, Pattern Recognition, Optimization, Performance Evaluation, Algorithm Design, Document Image Analysis, Non parametric statistics, Ground Truth, Parameter Optimization, Electrical And Electronic Engineering, Document Layout Analysis
The main purpose of this paper is to use wireless technology and design a knowledge-based expert system that tunes the parameters of a PID controller without any mathematical model of the plant. The designed expert system uses pattern recognition techniques to tune the controller's parameters, as a human expert does, and exchanges input and output signals with the plant under control over a wireless link. It applies a number of tuning rules that change the controller's parameters based on the shape of the closed-loop step response until the desired performance, set by the control engineer, is reached. The developed expert system is able to tune the controller for fixed, time-varying, time-delayed and noisy plants. The performance of the expert system has been examined using a laboratory ri. .
by Kamran Khakpour (https://independent.academia.edu/KamranKhakpour)
Topics: Expert Systems, Control Engineering, Pattern Recognition, Robust control
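A minimal sketch of the kind of rule-based tuning loop this abstract describes, assuming a hypothetical `step_response_metrics` helper that measures overshoot and settling time from the closed-loop step response; the thresholds and adjustment factors are illustrative, not the paper's actual rules.

```python
# Hypothetical rule-based PID tuning loop in the spirit of the abstract.
# step_response_metrics(plant, gains) is an assumed helper returning
# (overshoot, settling_time); it stands in for the wireless data exchange
# with the real plant described in the paper.

def tune_pid(plant, gains, target_overshoot=0.10, target_settling=2.0, max_iters=50):
    """Adjust (Kp, Ki, Kd) with simple heuristic rules until targets are met."""
    kp, ki, kd = gains
    for _ in range(max_iters):
        overshoot, settling = step_response_metrics(plant, (kp, ki, kd))
        if overshoot <= target_overshoot and settling <= target_settling:
            break
        if overshoot > target_overshoot:   # too oscillatory: soften the loop
            kp *= 0.9
            kd *= 1.1
        if settling > target_settling:     # too sluggish: speed it up
            ki *= 1.1
            kp *= 1.05
    return kp, ki, kd
```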

Content based image and video retrieval using embedded text
https://www.academia.edu/63229093/Content_based_image_and_video_retrieval_using_embedded_text
u-tcGrayDarkest"><div class="summarized">Extraction of text from image and video is an important step in building efficient indexing and retrieval systems for multimedia databases. We adopt a hybrid approach for such text extraction by exploiting a number of characteristics of... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_63229093" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Extraction of text from image and video is an important step in building efficient indexing and retrieval systems for multimedia databases. We adopt a hybrid approach for such text extraction by exploiting a number of characteristics of text blocks in color images and video frames. Our system detects both caption text as well as scene text of different font, size, color and intensity. We have developed an application for on-line extraction and recognition of texts from videos. Such texts are used for retrieval of video clips based on any given keyword. The application is available on the web for the readers to repeat our experiments and also to try text extraction and retrieval from their own videos.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/63229093" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="e8c479718194d1c012f353280970ed4d" rel="nofollow" data-download="{&quot;attachment_id&quot;:75724302,&quot;asset_id&quot;:63229093,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/75724302/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="51051523" href="https://independent.academia.edu/ChinmayaMisra">Chinmaya Misra</a><script data-card-contents-for-user="51051523" type="text/json">{"id":51051523,"first_name":"Chinmaya","last_name":"Misra","domain_name":"independent","page_name":"ChinmayaMisra","display_name":"Chinmaya Misra","profile_url":"https://independent.academia.edu/ChinmayaMisra?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_63229093 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="63229093"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 63229093, container: ".js-paper-rank-work_63229093", }); });</script></li><li 
class="js-percentile-work_63229093 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 63229093; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_63229093"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_63229093 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="63229093"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 63229093; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=63229093]").text(description); $(".js-view-count-work_63229093").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_63229093").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="63229093"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">16</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="464" rel="nofollow" href="https://www.academia.edu/Documents/in/Information_Retrieval">Information Retrieval</a>,&nbsp;<script data-card-contents-for-ri="464" type="text/json">{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a><script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=63229093]'), work: {"id":63229093,"title":"Content based image and video retrieval using embedded 
text","created_at":"2021-12-04T21:22:13.704-08:00","url":"https://www.academia.edu/63229093/Content_based_image_and_video_retrieval_using_embedded_text?f_ri=5109","dom_id":"work_63229093","summary":"Extraction of text from image and video is an important step in building efficient indexing and retrieval systems for multimedia databases. We adopt a hybrid approach for such text extraction by exploiting a number of characteristics of text blocks in color images and video frames. Our system detects both caption text as well as scene text of different font, size, color and intensity. We have developed an application for on-line extraction and recognition of texts from videos. Such texts are used for retrieval of video clips based on any given keyword. The application is available on the web for the readers to repeat our experiments and also to try text extraction and retrieval from their own videos.","downloadable_attachments":[{"id":75724302,"asset_id":63229093,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":51051523,"first_name":"Chinmaya","last_name":"Misra","domain_name":"independent","page_name":"ChinmayaMisra","display_name":"Chinmaya Misra","profile_url":"https://independent.academia.edu/ChinmayaMisra?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109","nofollow":true},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=5109","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":6426,"name":"Content Analysis","url":"https://www.academia.edu/Documents/in/Content_Analysis?f_ri=5109"},{"id":70731,"name":"Multimedia Database","url":"https://www.academia.edu/Documents/in/Multimedia_Database?f_ri=5109"},{"id":77936,"name":"Text","url":"https://www.academia.edu/Documents/in/Text?f_ri=5109"},{"id":88876,"name":"Image Retrieval","url":"https://www.academia.edu/Documents/in/Image_Retrieval?f_ri=5109"},{"id":141114,"name":"World Wide Web","url":"https://www.academia.edu/Documents/in/World_Wide_Web?f_ri=5109"},{"id":158329,"name":"Indexing","url":"https://www.academia.edu/Documents/in/Indexing?f_ri=5109"},{"id":171975,"name":"Video Retrieval","url":"https://www.academia.edu/Documents/in/Video_Retrieval?f_ri=5109"},{"id":187345,"name":"Color Image","url":"https://www.academia.edu/Documents/in/Color_Image?f_ri=5109"},{"id":377471,"name":"Hybrid Approach","url":"https://www.academia.edu/Documents/in/Hybrid_Approach?f_ri=5109"},{"id":749302,"name":"Indexation","url":"https://www.academia.edu/Documents/in/Indexation?f_ri=5109"},{"id":955461,"name":"Text Extraction","url":"https://www.academia.edu/Documents/in/Text_Extraction?f_ri=5109"},{"id":1191356,"name":"Internet","url":"https://www.academia.edu/Documents/in/Internet?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_54843655" data-work_id="54843655" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" 
href="https://www.academia.edu/54843655/GCHL_A_grid_clustering_algorithm_for_high_dimensional_very_large_spatial_data_bases">GCHL: A grid-clustering algorithm for high-dimensional very large spatial data bases</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Spatial clustering, which groups similar spatial objects into classes, is an important component of spatial data mining [Han and Kamber, Data Mining: Concepts and Techniques, 2000]. Due to its immense applications in various areas,... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_54843655" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Spatial clustering, which groups similar spatial objects into classes, is an important component of spatial data mining [Han and Kamber, Data Mining: Concepts and Techniques, 2000]. Due to its immense applications in various areas, spatial clustering has been highly active topic in data mining researches, with fruitful, scalable clustering methods developed recently. These spatial clustering methods can be classified into four categories: partitioning method, hierarchical method, density-based method and grid-based method. Clustering large data sets of high dimensionality has always been a serious challenge for clustering algorithms. Many recently developed clustering algorithms have attempted to address either handling data with very large number of records or data sets with very high number of dimensions. This new clustering method GCHL (a Grid-Clustering algorithm for High-dimensional very Large spatial databases) combines a novel density-grid based clustering with axis-parallel partitioning strategy to identify areas of high density in the input data space. The algorithm work as well in the feature space of any data set. The method operates on a limited memory buffer and requires at most a single scan through the data. 
We demonstrate the high quality of the obtained clustering solutions, capability of discovering concave/deeper and convex/higher regions, their robustness to outlier and noise, and GCHL excellent scalability.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/54843655" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="4e8cf711845dda01a41ae1834f1e0a88" rel="nofollow" data-download="{&quot;attachment_id&quot;:71009460,&quot;asset_id&quot;:54843655,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/71009460/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="44452786" href="https://independent.academia.edu/HPilevar">Hamid Pilevar</a><script data-card-contents-for-user="44452786" type="text/json">{"id":44452786,"first_name":"Hamid","last_name":"Pilevar","domain_name":"independent","page_name":"HPilevar","display_name":"Hamid Pilevar","profile_url":"https://independent.academia.edu/HPilevar?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_54843655 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="54843655"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 54843655, container: ".js-paper-rank-work_54843655", }); });</script></li><li class="js-percentile-work_54843655 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 54843655; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_54843655"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_54843655 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="54843655"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 54843655; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = 
window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=54843655]").text(description); $(".js-view-count-work_54843655").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_54843655").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="54843655"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">15</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="237" rel="nofollow" href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a>,&nbsp;<script data-card-contents-for-ri="237" type="text/json">{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2009" rel="nofollow" href="https://www.academia.edu/Documents/in/Data_Mining">Data Mining</a>,&nbsp;<script data-card-contents-for-ri="2009" type="text/json">{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="26817" rel="nofollow" href="https://www.academia.edu/Documents/in/Algorithm">Algorithm</a><script data-card-contents-for-ri="26817" type="text/json">{"id":26817,"name":"Algorithm","url":"https://www.academia.edu/Documents/in/Algorithm?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=54843655]'), work: {"id":54843655,"title":"GCHL: A grid-clustering algorithm for high-dimensional very large spatial data bases","created_at":"2021-10-02T07:20:00.149-07:00","url":"https://www.academia.edu/54843655/GCHL_A_grid_clustering_algorithm_for_high_dimensional_very_large_spatial_data_bases?f_ri=5109","dom_id":"work_54843655","summary":"Spatial clustering, which groups similar spatial objects into classes, is an important component of spatial data mining [Han and Kamber, Data Mining: Concepts and Techniques, 2000]. Due to its immense applications in various areas, spatial clustering has been highly active topic in data mining researches, with fruitful, scalable clustering methods developed recently. These spatial clustering methods can be classified into four categories: partitioning method, hierarchical method, density-based method and grid-based method. Clustering large data sets of high dimensionality has always been a serious challenge for clustering algorithms. Many recently developed clustering algorithms have attempted to address either handling data with very large number of records or data sets with very high number of dimensions. 
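A minimal density-grid clustering sketch in the spirit of the abstract, not the published GCHL algorithm: points are hashed into axis-parallel grid cells in a single scan, dense cells are kept, and adjacent dense cells are flood-filled into clusters. Cell size and density threshold are illustrative parameters.

```python
# Generic density-grid clustering sketch (single scan over the data).
from collections import defaultdict, deque

def grid_cluster(points, cell_size=1.0, min_pts=5):
    """points: list of equal-length numeric tuples. Returns {cell -> cluster_id}."""
    cells = defaultdict(list)
    for p in points:                                   # one pass: hash into grid cells
        key = tuple(int(x // cell_size) for x in p)
        cells[key].append(p)
    dense = {c for c, pts in cells.items() if len(pts) >= min_pts}

    def neighbors(cell):                               # axis-parallel adjacency
        for dim in range(len(cell)):
            for step in (-1, 1):
                yield cell[:dim] + (cell[dim] + step,) + cell[dim + 1:]

    labels, cluster_id = {}, 0
    for cell in dense:
        if cell in labels:
            continue
        labels[cell] = cluster_id                      # flood-fill adjacent dense cells
        queue = deque([cell])
        while queue:
            cur = queue.popleft()
            for nb in neighbors(cur):
                if nb in dense and nb not in labels:
                    labels[nb] = cluster_id
                    queue.append(nb)
        cluster_id += 1
    return labels
```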

Implementation of deep neural networks (DNN) with batch normalization for batik pattern recognition
https://www.academia.edu/43727059/Implementation_of_deep_neural_networks_DNN_with_batch_normalization_for_batik_pattern_recognition
One of the most famous cultural heritages in Indonesia is batik, a cloth decorated by drawing with malam (wax) and then processed in a particular way. The diversity of motifs, both in Indonesia and in allied countries, raises new research topics in information technology, covering conservation, storage, publication and the creation of new batik motifs. In computer science, batik patterns have been studied extensively and several algorithms have been applied successfully to batik pattern recognition. This study focuses on batik motif recognition using a fused texture feature set (Gabor, Log-Gabor and GLCM) with PCA feature reduction to improve classification accuracy and reduce computation time. To improve accuracy further, we propose a deep neural network model for recognising batik patterns and use batch normalisation as a regulariser to help the model generalise and to reduce time complexity. In the experiments, feature extraction, selection and reduction gave better accuracy than the raw dataset and also reduced time complexity. The DNN with batch normalisation significantly improved classification accuracy from 65.36% to 83.15%; batch normalisation made the model more general and hence more accurate. Parameter tuning further improved accuracy from 83.15% to 85.57%.
by International Journal of Electrical and Computer Engineering (IJECE) (https://independent.academia.edu/JournalIJECE)
Topics: Pattern Recognition, Deep Learning, Deep Neural Networks, Batch normalization

Network Based Prediction of Drug-Drug Interactions
https://www.academia.edu/36751114/Network_Based_Prediction_of_Drug_Drug_Interactions
Drug-drug interactions (DDIs) are responsible for many serious adverse events; their detection is crucial for patient safety but also very challenging. In recent years, several drugs have been withdrawn from the market due to interaction-related Adverse Events (AEs).
Currently, the US Food and Drug Administration (FDA) and pharmaceutical companies are showing great interest in the development of improved tools for identifying DDIs.

We describe a predictive model, applicable on a large scale, that predicts novel DDIs based on the similarity of candidate drugs to drugs involved in established DDIs. The underlying assumption is that if drug A and drug B interact to produce a specific biological effect, then drugs similar to drug A (or drug B) are likely to interact with drug B (or drug A) to produce the same effect. We constructed a 352-drug DDI network from a 2011 snapshot of a widely used drug safety database, which contains 3,700 established DDIs, and used it to develop the proposed model for predicting future DDIs. The target similarity for all selected pairs of drugs in DrugBank was computed to identify DDI candidates. The model follows two distinct approaches: one that forces the preservation of existing (known) DDIs and one that does not. Predictions were made under each approach using three techniques: target similarity score, side-effect similarity (P-score) and a resulting combined score.

The methodology was evaluated against the DrugBank 2014 snapshot as a gold standard for the same set of drugs. The model generates novel DDIs with an average accuracy of 95% when existing (known) DDIs are preserved and 92% when they are not; the two approaches give average AUC (Area Under the Curve) values of 0.9834 and 0.8651, respectively.

The results presented in this study demonstrate the usefulness of the proposed network-based drug-drug interaction methodology as a promising approach. The method described in this article is simple, efficient, and biologically sound.

Keywords: Drug-drug Interactions, Adverse Events, Target similarity score, P-score.
by Sohan Gunawardena (https://cmb.academia.edu/DinushaGunawardena)
Topics: Bioinformatics, Machine Learning, Computational Biology, Pattern Recognition
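A toy sketch of the similarity-based candidate generation the abstract describes, not the authors' scoring scheme: each drug is represented by its set of protein targets, pairwise similarity is the Jaccard index, and a pair becomes a DDI candidate when one drug is sufficiently similar to a drug already known to interact with the other. The threshold and data structures are illustrative.

```python
# Toy target-similarity DDI candidate generation (illustration only).
from itertools import combinations

def jaccard(a, b):
    return len(a & b) / len(a | b) if (a | b) else 0.0

def predict_ddi_candidates(targets, known_ddis, threshold=0.5):
    """targets: {drug: set_of_protein_targets}; known_ddis: iterable of (drug, drug) pairs."""
    known = {frozenset(p) for p in known_ddis}
    partners = {}
    for x, y in known_ddis:                      # known interaction partners per drug
        partners.setdefault(x, set()).add(y)
        partners.setdefault(y, set()).add(x)
    candidates = {}
    for a, b in combinations(targets, 2):
        if frozenset((a, b)) in known:
            continue
        # a resembles a known partner of b, or b resembles a known partner of a
        sims = [jaccard(targets[a], targets[x]) for x in partners.get(b, ()) if x in targets]
        sims += [jaccard(targets[b], targets[x]) for x in partners.get(a, ()) if x in targets]
        score = max(sims, default=0.0)
        if score >= threshold:
            candidates[(a, b)] = score
    return candidates
```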
COHESIVE MULTI-ORIENTED TEXT DETECTION AND RECOGNITION STRUCTURE IN NATURAL SCENE IMAGES REGIONS HAS EXPOSED
by Call for paper-International Journal of Distributed and Parallel Systems (IJDPS)
Topics: Algorithms, Natural Language Processing, Pattern Recognition, Horizontal Gene Transfer, Documentary Film, Stroke, Text Detection in Natural Scene, Researchers, Text Extraction, Paragraph

Scene text recognition has raised many new challenges in recent years. Detecting and recognizing text in scenes shares some problems with document processing, but recognizing text in natural scene images also poses numerous new ones. Recent research in this area has shown promise, yet much work remains. Most existing techniques focus on detecting horizontal or near-horizontal text. In this paper, we propose a new scheme that detects text of arbitrary orientation in natural scene images. The algorithm uses two sets of features designed to capture the intrinsic characteristics of text, based on MSER regions combined with the Otsu method. To evaluate the algorithm and compare it with existing approaches, we use the MSRA and ICDAR datasets as well as a new dataset that includes text in a variety of real-world situations. Experimental results on these standard datasets and on the proposed dataset show that the algorithm compares favorably with state-of-the-art methods on horizontal text and achieves significantly better performance on arbitrarily oriented text in complex natural scenes.
Learning With Few Examples by Transferring Feature Relevance
by Erik Rodner
Topics: Pattern Recognition, Binary Classification
rel="nofollow" href="https://www.academia.edu/Documents/in/Binary_Classification">Binary Classification</a><script data-card-contents-for-ri="154214" type="text/json">{"id":154214,"name":"Binary Classification","url":"https://www.academia.edu/Documents/in/Binary_Classification?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=258270]'), work: {"id":258270,"title":"Learning With Few Examples by Transferring Feature Relevance","created_at":"2010-07-07T17:48:52.990-07:00","url":"https://www.academia.edu/258270/Learning_With_Few_Examples_by_Transferring_Feature_Relevance?f_ri=5109","dom_id":"work_258270","summary":null,"downloadable_attachments":[{"id":3238886,"asset_id":258270,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":214106,"first_name":"Erik","last_name":"Rodner","domain_name":"berkeley","page_name":"ErikRodner","display_name":"Erik Rodner","profile_url":"https://berkeley.academia.edu/ErikRodner?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":154214,"name":"Binary Classification","url":"https://www.academia.edu/Documents/in/Binary_Classification?f_ri=5109","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_889941" data-work_id="889941" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/889941/Musical_genre_classification_of_audio_signals">Musical genre classification of audio signals</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Musical genres are categorical labels created by humans to characterize pieces of music. A musical genre is characterized by the common characteristics shared by its members. These characteristics typically are related to the... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_889941" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Musical genres are categorical labels created by humans to characterize pieces of music. A musical genre is characterized by the common characteristics shared by its members. These characteristics typically are related to the instrumentation, rhythmic structure, and harmonic content of the music. Genre hierarchies are commonly used to structure the large collections of music available on the Web. Currently musical genre annotation is performed manually. Automatic musical genre classification can assist or replace the human user in this process and would be a valuable addition to music information retrieval systems. 
In addition, automatic musical genre classification provides a framework for developing and evaluating features for any type of content-based analysis of musical signals.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/889941" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="d17da6ca00b6043be164eadc1bc5cf41" rel="nofollow" data-download="{&quot;attachment_id&quot;:5373755,&quot;asset_id&quot;:889941,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/5373755/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="711284" href="https://maringa.academia.edu/NilsonSouza">Nilson Souza</a><script data-card-contents-for-user="711284" type="text/json">{"id":711284,"first_name":"Nilson","last_name":"Souza","domain_name":"maringa","page_name":"NilsonSouza","display_name":"Nilson Souza","profile_url":"https://maringa.academia.edu/NilsonSouza?f_ri=5109","photo":"https://0.academia-photos.com/711284/240972/284935/s65_nilson.souza.jpg"}</script></span></span></li><li class="js-paper-rank-work_889941 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="889941"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 889941, container: ".js-paper-rank-work_889941", }); });</script></li><li class="js-percentile-work_889941 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 889941; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_889941"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_889941 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="889941"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 889941; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " 
" + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=889941]").text(description); $(".js-view-count-work_889941").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_889941").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="889941"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">24</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="48" rel="nofollow" href="https://www.academia.edu/Documents/in/Engineering">Engineering</a>,&nbsp;<script data-card-contents-for-ri="48" type="text/json">{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="464" rel="nofollow" href="https://www.academia.edu/Documents/in/Information_Retrieval">Information Retrieval</a>,&nbsp;<script data-card-contents-for-ri="464" type="text/json">{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="671" rel="nofollow" href="https://www.academia.edu/Documents/in/Music">Music</a><script data-card-contents-for-ri="671" type="text/json">{"id":671,"name":"Music","url":"https://www.academia.edu/Documents/in/Music?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=889941]'), work: {"id":889941,"title":"Musical genre classification of audio signals","created_at":"2011-09-06T00:55:13.462-07:00","url":"https://www.academia.edu/889941/Musical_genre_classification_of_audio_signals?f_ri=5109","dom_id":"work_889941","summary":"Musical genres are categorical labels created by humans to characterize pieces of music. A musical genre is characterized by the common characteristics shared by its members. These characteristics typically are related to the instrumentation, rhythmic structure, and harmonic content of the music. Genre hierarchies are commonly used to structure the large collections of music available on the Web. Currently musical genre annotation is performed manually. Automatic musical genre classification can assist or replace the human user in this process and would be a valuable addition to music information retrieval systems. 
In addition, automatic musical genre classification provides a framework for developing and evaluating features for any type of content-based analysis of musical signals.","downloadable_attachments":[{"id":5373755,"asset_id":889941,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":711284,"first_name":"Nilson","last_name":"Souza","domain_name":"maringa","page_name":"NilsonSouza","display_name":"Nilson Souza","profile_url":"https://maringa.academia.edu/NilsonSouza?f_ri=5109","photo":"https://0.academia-photos.com/711284/240972/284935/s65_nilson.souza.jpg"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering?f_ri=5109","nofollow":true},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=5109","nofollow":true},{"id":464,"name":"Information Retrieval","url":"https://www.academia.edu/Documents/in/Information_Retrieval?f_ri=5109","nofollow":true},{"id":671,"name":"Music","url":"https://www.academia.edu/Documents/in/Music?f_ri=5109","nofollow":true},{"id":687,"name":"Musicology","url":"https://www.academia.edu/Documents/in/Musicology?f_ri=5109"},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=5109"},{"id":2041,"name":"Music and Language","url":"https://www.academia.edu/Documents/in/Music_and_Language?f_ri=5109"},{"id":3460,"name":"Music Psychology","url":"https://www.academia.edu/Documents/in/Music_Psychology?f_ri=5109"},{"id":4148,"name":"Audio Signal Processing","url":"https://www.academia.edu/Documents/in/Audio_Signal_Processing?f_ri=5109"},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109"},{"id":5187,"name":"Statistical Analysis","url":"https://www.academia.edu/Documents/in/Statistical_Analysis?f_ri=5109"},{"id":10073,"name":"Sociology of Music","url":"https://www.academia.edu/Documents/in/Sociology_of_Music?f_ri=5109"},{"id":12039,"name":"Music Information Retrieval","url":"https://www.academia.edu/Documents/in/Music_Information_Retrieval?f_ri=5109"},{"id":27497,"name":"Instrumentation","url":"https://www.academia.edu/Documents/in/Instrumentation?f_ri=5109"},{"id":40738,"name":"Signal Analysis","url":"https://www.academia.edu/Documents/in/Signal_Analysis?f_ri=5109"},{"id":55276,"name":"Wavelet Analysis","url":"https://www.academia.edu/Documents/in/Wavelet_Analysis?f_ri=5109"},{"id":61864,"name":"Cultural Differences","url":"https://www.academia.edu/Documents/in/Cultural_Differences?f_ri=5109"},{"id":141114,"name":"World Wide Web","url":"https://www.academia.edu/Documents/in/World_Wide_Web?f_ri=5109"},{"id":157696,"name":"Music Genre Classification","url":"https://www.academia.edu/Documents/in/Music_Genre_Classification?f_ri=5109"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction?f_ri=5109"},{"id":229390,"name":"Real Time","url":"https://www.academia.edu/Documents/in/Real_Time?f_ri=5109"},{"id":494306,"name":"Statistical Pattern Recognition","url":"https://www.academia.edu/Documents/in/Statistical_Pattern_Recognition?f_ri=5109"},{"id":892538,"name":"Automatic Classification","url":"https://www.academia.edu/Documents/in/Automatic_Classification?f_ri=5109"},{"id":1488701,"name":"Multiple Signal Classification","url":"https://www.academia.edu/Documents/in/Multiple_Signal_Classification?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 
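Because the abstract frames genre classification as feature extraction over instrumentation, rhythm, and harmony followed by a classifier, the sketch below shows the generic shape of such a pipeline. librosa and scikit-learn are assumed libraries, and the specific features, durations, and file handling are illustrative rather than taken from the paper.

```python
# Minimal sketch of a genre-classification pipeline (librosa + scikit-learn assumed).
import numpy as np
import librosa
from sklearn.svm import SVC
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

def track_features(path):
    """Summarise a track by timbral (MFCC), spectral, and rhythmic statistics."""
    y, sr = librosa.load(path, duration=30.0)
    mfcc = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=13)
    centroid = librosa.feature.spectral_centroid(y=y, sr=sr)
    tempo, _ = librosa.beat.beat_track(y=y, sr=sr)
    return np.hstack([mfcc.mean(axis=1), mfcc.std(axis=1),
                      centroid.mean(), centroid.std(), tempo])

# audio_paths and genre labels are placeholders for a real music collection:
# X = np.vstack([track_features(p) for p in audio_paths])
# clf = make_pipeline(StandardScaler(), SVC(kernel="rbf")).fit(X, genres)
```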
Foundations of Technical Analysis: Computational Algorithms, Statistical Inference, and Empirical Implementation
by akshaya sharma
Topics: Finance, Pattern Recognition, Technical Analysis, Statistical Inference, Developing Country, Kernel Regression, Stock Returns
sharma","profile_url":"https://bangaloreuniversity.academia.edu/akshayasharma?f_ri=5109","photo":"https://0.academia-photos.com/3422454/1162820/1456159/s65_akshaya.sharma.jpg"}],"research_interests":[{"id":47,"name":"Finance","url":"https://www.academia.edu/Documents/in/Finance?f_ri=5109","nofollow":true},{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":43343,"name":"Technical Analysis","url":"https://www.academia.edu/Documents/in/Technical_Analysis?f_ri=5109","nofollow":true},{"id":67968,"name":"Statistical Inference","url":"https://www.academia.edu/Documents/in/Statistical_Inference?f_ri=5109","nofollow":true},{"id":70854,"name":"Developing Country","url":"https://www.academia.edu/Documents/in/Developing_Country?f_ri=5109"},{"id":254189,"name":"Kernel Regression","url":"https://www.academia.edu/Documents/in/Kernel_Regression?f_ri=5109"},{"id":974197,"name":"Stock Returns","url":"https://www.academia.edu/Documents/in/Stock_Returns?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_4042900 coauthored" data-work_id="4042900" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/4042900/Automatic_Identification_of_Impairments_Using_Support_Vector_Machine_Pattern_Classification_on_Eye_Diagrams">Automatic Identification of Impairments Using Support Vector Machine Pattern Classification on Eye Diagrams</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">We have demonstrated powerful new techniques for identifying the optical impairments causing the degradation of an optical channel. We use machine learning and pattern classification techniques on eye diagrams to identify the optical... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_4042900" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">We have demonstrated powerful new techniques for identifying the optical impairments causing the degradation of an optical channel. We use machine learning and pattern classification techniques on eye diagrams to identify the optical impairments. 
These capabilities can enable the development of low-cost optical performance monitors having significant diagnostic capabilities.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/4042900" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="554db1130270eb7706e9f2b5d986e2be" rel="nofollow" data-download="{&quot;attachment_id&quot;:50065169,&quot;asset_id&quot;:4042900,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/50065169/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="4860811" href="https://webplus.academia.edu/JoelGannett">Joel Gannett</a><script data-card-contents-for-user="4860811" type="text/json">{"id":4860811,"first_name":"Joel","last_name":"Gannett","domain_name":"webplus","page_name":"JoelGannett","display_name":"Joel Gannett","profile_url":"https://webplus.academia.edu/JoelGannett?f_ri=5109","photo":"https://0.academia-photos.com/4860811/2083886/18365318/s65_joel.gannett.jpg"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-4042900">+1</span><div class="hidden js-additional-users-4042900"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://independent.academia.edu/MarcusPang2">Marcus Pang</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-4042900'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-4042900').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_4042900 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="4042900"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 4042900, container: ".js-paper-rank-work_4042900", }); });</script></li><li class="js-percentile-work_4042900 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 4042900; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var 
container = $(".js-percentile-work_4042900"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_4042900 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="4042900"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 4042900; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=4042900]").text(description); $(".js-view-count-work_4042900").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_4042900").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="4042900"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">9</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="10019" rel="nofollow" href="https://www.academia.edu/Documents/in/Photonics">Photonics</a>,&nbsp;<script data-card-contents-for-ri="10019" type="text/json">{"id":10019,"name":"Photonics","url":"https://www.academia.edu/Documents/in/Photonics?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="10408" rel="nofollow" href="https://www.academia.edu/Documents/in/Support_Vector_Machines">Support Vector Machines</a><script data-card-contents-for-ri="10408" type="text/json">{"id":10408,"name":"Support Vector Machines","url":"https://www.academia.edu/Documents/in/Support_Vector_Machines?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=4042900]'), work: {"id":4042900,"title":"Automatic Identification of Impairments Using Support Vector Machine Pattern Classification on Eye Diagrams","created_at":"2013-07-16T06:44:31.644-07:00","url":"https://www.academia.edu/4042900/Automatic_Identification_of_Impairments_Using_Support_Vector_Machine_Pattern_Classification_on_Eye_Diagrams?f_ri=5109","dom_id":"work_4042900","summary":"We have demonstrated powerful new techniques for identifying the optical impairments causing the degradation of an optical channel. We use machine learning and pattern classification techniques on eye diagrams to identify the optical impairments. 
Symbol Recognition: Current Advances and Perspectives
by Gemma Sánchez
Topics: Pattern Recognition, User Interface, Performance Evaluation, Document Analysis, Graphics Recognition, Symbol Recognition, Scalability, Indexation, Point of View, Symbolic Representation of Drugs, Engineering Drawings, Pattern Analysis

The recognition of symbols in graphic documents is an intensive research activity in the pattern recognition and document analysis community. A key issue in the interpretation of maps, engineering drawings, diagrams, etc. is the recognition of domain-dependent symbols according to a symbol database. In this work we first review the most outstanding symbol recognition methods from two points of view: application domains and pattern recognition methods. In the second part of the paper, open and unaddressed problems in symbol recognition are described, analyzing their current state of the art and discussing future research challenges. Issues such as symbol representation, matching, segmentation, learning, scalability of recognition methods, and performance evaluation are addressed. Finally, we discuss the perspectives of symbol recognition with regard to new paradigms such as user interfaces on handheld computers and document database and WWW indexing by graphical content.
In the second part of the paper, open and unaddressed problems involved in symbol recognition are described, analyzing their current state of art and discussing future research challenges. Thus, issues such as symbol representation, matching, segmentation, learning, scalability of recognition methods and performance evaluation are addressed in this work. Finally, we discuss the perspectives of symbol recognition concerning to new paradigms such as user interfaces in handheld computers or document database and WWW indexing by graphical content.","downloadable_attachments":[{"id":48170039,"asset_id":8242076,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":16278091,"first_name":"Gemma","last_name":"Sánchez","domain_name":"independent","page_name":"SánchezGemma","display_name":"Gemma Sánchez","profile_url":"https://independent.academia.edu/S%C3%A1nchezGemma?f_ri=5109","photo":"https://0.academia-photos.com/16278091/4416467/5123120/s65_gemma.s_nchez.jpg_oh_07aaaa8cccf2b37cb965d473911df7e8_oe_5485ae75___gda___1418656013_2f4cd130b053a2ac93ac76612b869e61"}],"research_interests":[{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":11119,"name":"User Interface","url":"https://www.academia.edu/Documents/in/User_Interface?f_ri=5109","nofollow":true},{"id":55641,"name":"Performance Evaluation","url":"https://www.academia.edu/Documents/in/Performance_Evaluation?f_ri=5109","nofollow":true},{"id":71999,"name":"Document Analysis","url":"https://www.academia.edu/Documents/in/Document_Analysis?f_ri=5109","nofollow":true},{"id":155450,"name":"Graphics Recognition","url":"https://www.academia.edu/Documents/in/Graphics_Recognition?f_ri=5109"},{"id":155451,"name":"Symbol Recognition","url":"https://www.academia.edu/Documents/in/Symbol_Recognition?f_ri=5109"},{"id":377043,"name":"Scalability","url":"https://www.academia.edu/Documents/in/Scalability?f_ri=5109"},{"id":749302,"name":"Indexation","url":"https://www.academia.edu/Documents/in/Indexation?f_ri=5109"},{"id":892890,"name":"Point of View","url":"https://www.academia.edu/Documents/in/Point_of_View?f_ri=5109"},{"id":961959,"name":"Symbolic Representation of Drugs","url":"https://www.academia.edu/Documents/in/Symbolic_Representation_of_Drugs?f_ri=5109"},{"id":1545338,"name":"Engineering Drawings","url":"https://www.academia.edu/Documents/in/Engineering_Drawings?f_ri=5109"},{"id":2500540,"name":"Pattern analysis","url":"https://www.academia.edu/Documents/in/Pattern_analysis?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_27034032" data-work_id="27034032" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/27034032/Associative_memory_of_a_dynamical_system_the_example_of_the_convection_instability">Associative memory of a dynamical system: the example of the convection instability</a></div></div><div class="u-pb4x u-mt3x"></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/27034032" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span 
class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="3e0951899d6fba606c336ad28ca035b2" rel="nofollow" data-download="{&quot;attachment_id&quot;:47289532,&quot;asset_id&quot;:27034032,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/47289532/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="51073861" href="https://b-tu.academia.edu/MBestehorn">M. Bestehorn</a><script data-card-contents-for-user="51073861" type="text/json">{"id":51073861,"first_name":"M.","last_name":"Bestehorn","domain_name":"b-tu","page_name":"MBestehorn","display_name":"M. Bestehorn","profile_url":"https://b-tu.academia.edu/MBestehorn?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_27034032 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="27034032"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 27034032, container: ".js-paper-rank-work_27034032", }); });</script></li><li class="js-percentile-work_27034032 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 27034032; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_27034032"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_27034032 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="27034032"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 27034032; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=27034032]").text(description); $(".js-view-count-work_27034032").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_27034032").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="27034032"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text 
u-positionRelative">6</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="53344" rel="nofollow" href="https://www.academia.edu/Documents/in/Fluid">Fluid</a>,&nbsp;<script data-card-contents-for-ri="53344" type="text/json">{"id":53344,"name":"Fluid","url":"https://www.academia.edu/Documents/in/Fluid?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="80414" rel="nofollow" href="https://www.academia.edu/Documents/in/Mathematical_Sciences">Mathematical Sciences</a>,&nbsp;<script data-card-contents-for-ri="80414" type="text/json">{"id":80414,"name":"Mathematical Sciences","url":"https://www.academia.edu/Documents/in/Mathematical_Sciences?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="118582" rel="nofollow" href="https://www.academia.edu/Documents/in/Physical_sciences">Physical sciences</a><script data-card-contents-for-ri="118582" type="text/json">{"id":118582,"name":"Physical sciences","url":"https://www.academia.edu/Documents/in/Physical_sciences?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=27034032]'), work: {"id":27034032,"title":"Associative memory of a dynamical system: the example of the convection instability","created_at":"2016-07-16T23:31:31.288-07:00","url":"https://www.academia.edu/27034032/Associative_memory_of_a_dynamical_system_the_example_of_the_convection_instability?f_ri=5109","dom_id":"work_27034032","summary":null,"downloadable_attachments":[{"id":47289532,"asset_id":27034032,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":51073861,"first_name":"M.","last_name":"Bestehorn","domain_name":"b-tu","page_name":"MBestehorn","display_name":"M. 
Bestehorn","profile_url":"https://b-tu.academia.edu/MBestehorn?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":53344,"name":"Fluid","url":"https://www.academia.edu/Documents/in/Fluid?f_ri=5109","nofollow":true},{"id":80414,"name":"Mathematical Sciences","url":"https://www.academia.edu/Documents/in/Mathematical_Sciences?f_ri=5109","nofollow":true},{"id":118582,"name":"Physical sciences","url":"https://www.academia.edu/Documents/in/Physical_sciences?f_ri=5109","nofollow":true},{"id":328150,"name":"Associative Memory","url":"https://www.academia.edu/Documents/in/Associative_Memory?f_ri=5109"},{"id":868912,"name":"Dynamic System","url":"https://www.academia.edu/Documents/in/Dynamic_System?f_ri=5109"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_12604840 coauthored" data-work_id="12604840" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/12604840/Chemometric_classification_of_Basque_and_French_ciders_based_on_their_total_polyphenol_contents_and_CIELab_parameters">Chemometric classification of Basque and French ciders based on their total polyphenol contents and CIELab parameters</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Total polyphenol contents, estimated by Folin–Ciocalteu method, and CIELab chromatic parameters were determined in Basque and French ciders with the aim of developing a classification system to confirm the authenticity of ciders. A... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_12604840" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Total polyphenol contents, estimated by Folin–Ciocalteu method, and CIELab chromatic parameters were determined in Basque and French ciders with the aim of developing a classification system to confirm the authenticity of ciders. A preliminary study of data structure was performed by a multivariate data analysis using chemometric techniques such as cluster analysis and principal component analysis. 
Supervised pattern recognition methods,</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/12604840" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="70d0f25e2846dfe9e0efaaf39791719a" rel="nofollow" data-download="{&quot;attachment_id&quot;:46055778,&quot;asset_id&quot;:12604840,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/46055778/download_file?st=MTc0MDA0MzU4Miw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31548814" href="https://ehu.academia.edu/BlancaGallo">Blanca Gallo</a><script data-card-contents-for-user="31548814" type="text/json">{"id":31548814,"first_name":"Blanca","last_name":"Gallo","domain_name":"ehu","page_name":"BlancaGallo","display_name":"Blanca Gallo","profile_url":"https://ehu.academia.edu/BlancaGallo?f_ri=5109","photo":"/images/s65_no_pic.png"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-12604840">+1</span><div class="hidden js-additional-users-12604840"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://conicet-ar.academia.edu/RosaAlonsoSalces">Rosa Alonso-Salces</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-12604840'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-12604840').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_12604840 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="12604840"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 12604840, container: ".js-paper-rank-work_12604840", }); });</script></li><li class="js-percentile-work_12604840 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 12604840; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_12604840"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + 
percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_12604840 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="12604840"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 12604840; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=12604840]").text(description); $(".js-view-count-work_12604840").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_12604840").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="12604840"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">13</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="5069" rel="nofollow" href="https://www.academia.edu/Documents/in/Principal_Component_Analysis">Principal Component Analysis</a>,&nbsp;<script data-card-contents-for-ri="5069" type="text/json">{"id":5069,"name":"Principal Component Analysis","url":"https://www.academia.edu/Documents/in/Principal_Component_Analysis?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5109" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a>,&nbsp;<script data-card-contents-for-ri="5109" type="text/json">{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="16137" rel="nofollow" href="https://www.academia.edu/Documents/in/Food_Chemistry">Food Chemistry</a>,&nbsp;<script data-card-contents-for-ri="16137" type="text/json">{"id":16137,"name":"Food Chemistry","url":"https://www.academia.edu/Documents/in/Food_Chemistry?f_ri=5109","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="23892" rel="nofollow" href="https://www.academia.edu/Documents/in/Multivariate_Data_Analysis">Multivariate Data Analysis</a><script data-card-contents-for-ri="23892" type="text/json">{"id":23892,"name":"Multivariate Data Analysis","url":"https://www.academia.edu/Documents/in/Multivariate_Data_Analysis?f_ri=5109","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=12604840]'), work: {"id":12604840,"title":"Chemometric classification of Basque and French ciders based on their total polyphenol contents and CIELab parameters","created_at":"2015-05-26T05:34:19.024-07:00","url":"https://www.academia.edu/12604840/Chemometric_classification_of_Basque_and_French_ciders_based_on_their_total_polyphenol_contents_and_CIELab_parameters?f_ri=5109","dom_id":"work_12604840","summary":"Total polyphenol contents, estimated by Folin–Ciocalteu method, and CIELab chromatic parameters were determined in Basque and French ciders with the aim of developing a classification system to confirm the authenticity of ciders. 
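A minimal sketch of the kind of chemometric pipeline this abstract describes (not the authors' code): samples are described by total polyphenol content plus CIELab colour values, principal component analysis summarizes the data structure, and a supervised classifier (k-nearest neighbours, one of the methods listed among the topics) assigns the origin class. The feature layout and the synthetic data below are assumptions for illustration only.

```python
# Sketch only: PCA for exploratory structure, then supervised classification.
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)

# Columns: total polyphenols (mg/L), CIELab L*, a*, b* -- one row per cider sample.
basque = rng.normal([1200.0, 45.0, 5.0, 60.0], [150.0, 4.0, 1.5, 6.0], size=(40, 4))
french = rng.normal([900.0, 55.0, 2.0, 45.0], [150.0, 4.0, 1.5, 6.0], size=(40, 4))
X = np.vstack([basque, french])
y = np.array([0] * 40 + [1] * 40)          # 0 = Basque, 1 = French

# Unsupervised step: project onto principal components.
# Supervised step: k-nearest-neighbour classification on the PCA scores.
model = make_pipeline(StandardScaler(),
                      PCA(n_components=2),
                      KNeighborsClassifier(n_neighbors=5))
print("CV accuracy:", cross_val_score(model, X, y, cv=5).mean())
```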
Recognizing Facial Expression: Machine Learning and Application to Spontaneous Behavior
by Ian Fasel, Claudia Lainscsek, and J. Movellan
We present a systematic comparison of machine learning methods applied to the problem of fully automatic recognition of facial expressions. We report results on a series of experiments comparing recognition engines, including AdaBoost, support vector machines, and linear discriminant analysis. We also explored feature selection techniques, including the use of AdaBoost for feature selection prior to classification by SVM or LDA. The best results were obtained by selecting a subset of Gabor filters using AdaBoost, followed by classification with support vector machines. The system operates in real time and obtained 93% correct generalization to novel subjects for a 7-way forced choice on the Cohn-Kanade expression dataset. The outputs of the classifiers change smoothly as a function of time and can thus be used to measure facial expression dynamics. We applied the system to fully automated recognition of facial actions (FACS). The present system classifies 17 action units, whether they occur singly or in combination with other actions, with a mean accuracy of 94.8%. We present preliminary results from applying this system to spontaneous facial expressions.
Topics: Machine Learning, Pattern Recognition, Facial expression, Feature Selection, Computer Vision and Pattern Recognition, Support vector machine, Gabor Filter, Action Unit
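The two-stage scheme the abstract describes (AdaBoost to pick a small subset of Gabor filter outputs, SVM to classify on that subset) can be sketched as follows. This is not the authors' implementation: the "Gabor responses" here are synthetic features, and AdaBoost's feature importances stand in for the boosting-based filter selection.

```python
# Sketch only: AdaBoost-based feature selection followed by an SVM classifier.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Stand-in for Gabor-filter responses: 500 features, only a few informative.
X, y = make_classification(n_samples=600, n_features=500, n_informative=20,
                           random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.25, random_state=0)

# Step 1: AdaBoost with decision stumps; its feature importances rank the features.
ada = AdaBoostClassifier(n_estimators=200, random_state=0).fit(X_tr, y_tr)
selected = np.argsort(ada.feature_importances_)[::-1][:50]   # keep the top 50 "filters"

# Step 2: an SVM trained only on the AdaBoost-selected feature subset.
svm = SVC(kernel="linear", C=1.0).fit(X_tr[:, selected], y_tr)
print("Held-out accuracy:", accuracy_score(y_te, svm.predict(X_te[:, selected])))
```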
Algorithm for the computation of 3D Fourier descriptors
by Jan Sijbers
This work describes a new approach for the computation of 3D Fourier descriptors, which are used for the characterization, classification, and recognition of 3D objects. The method starts with a polygonized surface, which is mapped onto a unit sphere using an inflation algorithm, after which the polyhedron is expanded in spherical harmonic functions. A homogeneous distribution of the vertices is achieved by applying an iterative watershed algorithm to the surface graph.
Topics: Iterative Methods, Pattern Recognition, Image Classification, Image Reconstruction, Sampling methods, Object Recognition, Fourier Transforms
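To illustrate the final expansion step only: assuming the mesh has already been inflated onto the unit sphere (the inflation and watershed steps from the abstract are not reproduced here), each vertex is described by spherical angles and its original radius, and spherical harmonic coefficients of that radius function can be fitted by least squares. The per-degree energies then serve as simple Fourier-style shape descriptors. The sampling and fitting scheme below is an assumption for illustration, not the paper's algorithm.

```python
# Sketch only: least-squares spherical-harmonic expansion of a radius function.
import numpy as np
from scipy.special import sph_harm

rng = np.random.default_rng(1)
n_vertices, max_degree = 2000, 8

# Synthetic stand-in for an inflated mesh: azimuth theta, polar angle phi, radius r.
theta = rng.uniform(0.0, 2.0 * np.pi, n_vertices)
phi = np.arccos(rng.uniform(-1.0, 1.0, n_vertices))
r = 1.0 + 0.3 * np.cos(2.0 * phi) + 0.1 * np.sin(3.0 * theta) * np.sin(phi)

# Design matrix: one column per (degree n, order m) spherical harmonic.
columns, degrees = [], []
for n in range(max_degree + 1):
    for m in range(-n, n + 1):
        columns.append(sph_harm(m, n, theta, phi))
        degrees.append(n)
Y = np.column_stack(columns)

coeffs, *_ = np.linalg.lstsq(Y, r.astype(complex), rcond=None)

# Descriptor: total spectral energy per degree.
degrees = np.array(degrees)
descriptor = np.array([np.sum(np.abs(coeffs[degrees == n]) ** 2)
                       for n in range(max_degree + 1)])
print(np.round(descriptor, 4))
```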
Classification and Clustering of Electricity Demand Patterns in Industrial Parks
by Luis angel Alvarado Hernandez
Understanding energy consumption patterns is extremely important for the optimization of resources and the application of green trends. Traditionally, analyses were performed for large environments such as regions and nations. However, with the advent of Smart Grids, studying the behavior of smaller environments has become a necessity, allowing deeper micromanagement of the energy grid. This paper presents a data processing system for analyzing energy consumption patterns in industrial parks, based on the cascade application of a Self-Organizing Map (SOM) and the k-means clustering algorithm. The system is validated with real load data from an industrial park in Spain. The validation results show that the system finds meaningful behavior patterns, and is capable of doing so without supervision and without any prior knowledge about the data.
Topics: Engineering, Pattern Recognition, Clustering, Physical sciences, K Means, ENERGIES, Self Organizing Map, industrial park
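A minimal sketch of the SOM-to-k-means cascade named in the abstract (not the paper's system): a small NumPy self-organizing map first summarizes daily load profiles into codebook vectors, and k-means then clusters those codebooks. The load profiles below are synthetic stand-ins; the grid size, learning schedule, and cluster count are assumptions.

```python
# Sketch only: tiny SOM (NumPy) followed by k-means on the SOM codebook.
import numpy as np
from sklearn.cluster import KMeans

rng = np.random.default_rng(2)

# Synthetic 24-hour load profiles: "day-shift" vs "night-shift" consumption.
hours = np.arange(24)
day_shift = rng.normal(1.0, 0.05, (100, 24)) * ((hours >= 6) & (hours < 18))
night_shift = rng.normal(1.0, 0.05, (100, 24)) * ((hours < 6) | (hours >= 18))
X = np.vstack([day_shift, night_shift])

# 1) Train a small 6x6 SOM with a shrinking neighbourhood and learning rate.
grid = np.array([(i, j) for i in range(6) for j in range(6)], dtype=float)
weights = rng.normal(size=(36, 24))
for t in range(2000):
    x = X[rng.integers(len(X))]
    bmu = np.argmin(np.linalg.norm(weights - x, axis=1))   # best matching unit
    sigma = 3.0 * np.exp(-t / 1000.0)                       # neighbourhood radius
    lr = 0.5 * np.exp(-t / 1000.0)                          # learning rate
    h = np.exp(-np.sum((grid - grid[bmu]) ** 2, axis=1) / (2.0 * sigma ** 2))
    weights += lr * h[:, None] * (x - weights)

# 2) Cluster the SOM codebook with k-means, then label each profile via its BMU.
kmeans = KMeans(n_clusters=2, n_init=10, random_state=0).fit(weights)
bmu_per_profile = np.argmin(
    np.linalg.norm(X[:, None, :] - weights[None, :, :], axis=2), axis=1)
print(np.bincount(kmeans.labels_[bmu_per_profile]))
```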
B.: RASH: RAdon Soft Hash algorithm
by Jean-Didier Legat
In this paper, we present a highly compressive, collision-resistant algorithm for images, suitable both for extracting an indexing pattern of the image and for detecting deformations applied to the original image. Some transforms extract characteristics that are invariant to geometrical deformations (rotation and scaling). Among them, the Radon transform, widely used in magnetic resonance imaging, is also robust against basic image processing attacks (such as compression, filtering, and blurring) and strong attacks (Stirmark). This transformation makes it easy to characterize the features of geometrical transforms, and it also permits easy extraction of an indexing vector of the image.
Topics: Pattern Recognition, Hash Function
It permits also an easy extraction of an indexing vector of the image.","downloadable_attachments":[{"id":81963503,"asset_id":73442701,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":193353,"first_name":"Jean-Didier","last_name":"Legat","domain_name":"uclouvain","page_name":"JeanDidierLegat","display_name":"Jean-Didier Legat","profile_url":"https://uclouvain.academia.edu/JeanDidierLegat?f_ri=5109","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":5109,"name":"Pattern Recognition","url":"https://www.academia.edu/Documents/in/Pattern_Recognition?f_ri=5109","nofollow":true},{"id":1684492,"name":"Hash Function","url":"https://www.academia.edu/Documents/in/Hash_Function?f_ri=5109","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_70025709" data-work_id="70025709" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/70025709/Efficient_IRIS_Recognition_through_Improvement_of_Feature_Extraction_and_subset_Selection">Efficient IRIS Recognition through Improvement of Feature Extraction and subset Selection</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The selection of the optimal feature subset and the classification has become an important issue in the field of iris recognition. In this paper we propose several methods for iris feature subset selection and vector creation. The... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_70025709" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The selection of the optimal feature subset and the classification has become an important issue in the field of iris recognition. In this paper we propose several methods for iris feature subset selection and vector creation. The deterministic feature sequence is extracted from the iris image by using the contourlet transform technique. 
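The abstract's core idea is that per-angle summaries of the Radon transform change little under common attacks, so they can serve as a soft (perceptual) hash. The sketch below only illustrates that idea using scikit-image's radon; the variance-per-angle descriptor and the function name radon_feature_vector are assumptions, not the published RASH construction.

    import numpy as np
    from skimage.data import camera
    from skimage.transform import radon, rotate

    def radon_feature_vector(image, n_angles=32):
        # Summarize each Radon projection by its variance, then L2-normalize.
        # Illustrative geometry-aware descriptor, not the exact RASH hash.
        angles = np.linspace(0.0, 180.0, n_angles, endpoint=False)
        sinogram = radon(image.astype(float), theta=angles, circle=False)
        features = sinogram.var(axis=0)   # one value per projection angle
        return features / np.linalg.norm(features)

    img = camera()
    v_original = radon_feature_vector(img)
    v_rotated = radon_feature_vector(rotate(img, angle=5, preserve_range=True))

    # A small distance suggests the second image is a deformed copy of the first;
    # thresholding or quantizing this comparison would act as the soft-hash check.
    print("distance after 5-degree rotation:", np.linalg.norm(v_original - v_rotated))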
Efficient IRIS Recognition through Improvement of Feature Extraction and subset Selection
The selection of the optimal feature subset and the classification step have become important issues in the field of iris recognition. In this paper we propose several methods for iris feature subset selection and vector creation. The deterministic feature sequence is extracted from the iris image using the contourlet transform technique. The contourlet transform captures the intrinsic geometrical structures of …
by Amir Azizi
Topics: Computer Science, Pattern Recognition, Feature Extraction, Support vector machine, IRIS RECOGNITION, Classification Accuracy, Feature Subset Selection, Subset Selection, arXiv
Related Topics: Computer Vision, Machine Learning, Image Processing, Artificial Intelligence, Data Mining, Artificial Neural Networks, Computer Science, Classification (Machine Learning), Object Recognition (Pattern Recognition), Face Recognition
